diff --git a/.cookiecutter.json b/.cookiecutter.json index dbce566c3..020ae61d4 100644 --- a/.cookiecutter.json +++ b/.cookiecutter.json @@ -21,15 +21,16 @@ "_drift_manager": { "template": "https://github.com/nautobot/cookiecutter-nautobot-app.git", "template_dir": "nautobot-app", - "template_ref": "refs/tags/nautobot-app-v2.3.2", + "template_ref": "refs/tags/nautobot-app-v2.4.0", "cookie_dir": "", "branch_prefix": "drift-manager", "pull_request_strategy": "create", "post_actions": [ - "black" + "ruff", + "poetry" ], - "draft": true, - "baked_commit_ref": "022954d51f46c54813d3c8c81584a16f0faea1f5" + "draft": false, + "baked_commit_ref": "671ef8a64a7bc40e991ade1dd64560d734f122bc" } } } diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 7ac44267f..cd890a910 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,6 +2,7 @@ * @nautobot/plugin-ssot /nautobot_ssot/integrations/aci/ @chadell @nautobot/plugin-ssot /nautobot_ssot/integrations/aristacv/ @qduk @jdrew82 @nautobot/plugin-ssot +/nautobot_ssot/integrations/bootstrap/ @bile0026 @nautobot/plugin-ssot /nautobot_ssot/integrations/device42/ @jdrew82 @nautobot/plugin-ssot /nautobot_ssot/integrations/infoblox/ @qduk @jdrew82 @nautobot/plugin-ssot /nautobot_ssot/integrations/ipfabric/ @alhogan @nautobot/plugin-ssot diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0a41f6411..92f1bdba1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,7 +37,7 @@ jobs: - name: "Setup environment" uses: "networktocode/gh-action-setup-poetry-environment@v6" - name: "Linting: ruff" - run: "poetry run invoke ruff" + run: "poetry run invoke ruff --action lint" check-docs-build: runs-on: "ubuntu-22.04" env: @@ -146,6 +146,10 @@ jobs: uses: "actions/checkout@v4" - name: "Setup environment" uses: "networktocode/gh-action-setup-poetry-environment@v6" + - name: "Constrain Nautobot version and regenerate lock file" + env: + INVOKE_NAUTOBOT_SSOT_LOCAL: "true" + run: "poetry run invoke lock --constrain-nautobot-ver --constrain-python-ver" - name: "Set up Docker Buildx" id: "buildx" uses: "docker/setup-buildx-action@v3" @@ -172,9 +176,9 @@ jobs: - name: "Run Tests" run: "poetry run invoke unittest" changelog: - if: | + if: > contains(fromJson('["develop","ltm-1.6"]'), github.base_ref) && - (github.head_ref != 'main') + (github.head_ref != 'main') && (!startsWith(github.head_ref, 'release')) runs-on: "ubuntu-22.04" steps: - name: "Check out repository code" diff --git a/.github/workflows/upstream_testing.yml b/.github/workflows/upstream_testing.yml index 5351934f6..535e66636 100644 --- a/.github/workflows/upstream_testing.yml +++ b/.github/workflows/upstream_testing.yml @@ -4,10 +4,11 @@ name: "Nautobot Upstream Monitor" on: # yamllint disable-line rule:truthy rule:comments schedule: - cron: "0 4 */2 * *" # every other day at midnight + workflow_dispatch: jobs: upstream-test: uses: "nautobot/nautobot/.github/workflows/plugin_upstream_testing_base.yml@develop" with: # Below could potentially be collapsed into a single argument if a concrete relationship between both is enforced invoke_context_name: "NAUTOBOT_SSOT" - plugin_name: "nautobot-app-ssot" + plugin_name: "nautobot-ssot" diff --git a/README.md b/README.md index 61bae8cdd..bdca4711d 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,7 @@ This Nautobot application framework includes the following integrations: - Infoblox - IPFabric - Itential +- Cisco Meraki - ServiceNow Read more about integrations 
[here](https://docs.nautobot.com/projects/ssot/en/latest/user/integrations). To enable and configure integrations follow the instructions from [the install guide](https://docs.nautobot.com/projects/ssot/en/latest/admin/install/#integrations-configuration). @@ -42,16 +43,19 @@ Read more about integrations [here](https://docs.nautobot.com/projects/ssot/en/l --- The dashboard view of the app. + ![Dashboard View](https://raw.githubusercontent.com/nautobot/nautobot-app-ssot/develop/docs/images/dashboard_initial.png) --- The detailed view of the example data source that is prepackaged within this app. + ![Data Source Detail View](https://raw.githubusercontent.com/nautobot/nautobot-app-ssot/develop/docs/images/data_source_detail.png) --- The detailed view of an executed sync. + ![Sync Detail View](https://raw.githubusercontent.com/nautobot/nautobot-app-ssot/develop/docs/images/sync_detail.png) --- @@ -83,6 +87,7 @@ The SSoT framework includes a number of integrations with external Systems of Re * Cisco DNA Center * Infoblox * Itential +* Cisco Meraki * ServiceNow > Note that the Arista CloudVision integration is currently incompatible with the [Arista Labs](https://labs.arista.com/) environment due to a TLS issue. It has been confirmed to work in on-prem environments previously. diff --git a/changes/536.housekeeping b/changes/536.housekeeping deleted file mode 100644 index 0d9691c7d..000000000 --- a/changes/536.housekeeping +++ /dev/null @@ -1 +0,0 @@ -Cut release for 3.1.0 \ No newline at end of file diff --git a/development/Dockerfile b/development/Dockerfile index c689be7dd..905b286fe 100644 --- a/development/Dockerfile +++ b/development/Dockerfile @@ -54,14 +54,16 @@ WORKDIR /source COPY . /source # Build args must be declared in each stage +ARG NAUTOBOT_VER ARG PYTHON_VER -# Constrain the Nautobot version to NAUTOBOT_VER +# Constrain the Nautobot version to NAUTOBOT_VER, fall back to installing from git branch if not available on PyPI # In CI, this should be done outside of the Dockerfile to prevent cross-compile build failures ARG CI RUN if [ -z "${CI+x}" ]; then \ INSTALLED_NAUTOBOT_VER=$(pip show nautobot | grep "^Version" | sed "s/Version: //"); \ - poetry add --lock nautobot@${INSTALLED_NAUTOBOT_VER} --python ${PYTHON_VER}; fi + poetry add --lock nautobot@${INSTALLED_NAUTOBOT_VER} --python ${PYTHON_VER} || \ + poetry add --lock git+https://github.com/nautobot/nautobot.git#${NAUTOBOT_VER} --python ${PYTHON_VER}; fi # Install the app RUN poetry install --extras all --with dev diff --git a/development/app_config_schema.py b/development/app_config_schema.py index e52e24786..a779b14ef 100644 --- a/development/app_config_schema.py +++ b/development/app_config_schema.py @@ -40,9 +40,7 @@ def _main(): **SchemaBuilder().to_json_schema(app_config), # type: ignore } app_config = import_module(package_name).config - _enrich_object_schema( - schema, app_config.default_settings, app_config.required_settings - ) + _enrich_object_schema(schema, app_config.default_settings, app_config.required_settings) schema_path.write_text(json.dumps(schema, indent=4) + "\n") print(f"\n==================\nGenerated schema:\n\n{schema_path}\n") print( diff --git a/development/creds.example.env b/development/creds.example.env index 5feb6c71c..45757d455 100644 --- a/development/creds.example.env +++ b/development/creds.example.env @@ -48,3 +48,10 @@ NAUTOBOT_APIC_VERIFY_DEVNET=False SERVICENOW_PASSWORD="changeme" IPFABRIC_API_TOKEN=secrettoken + +NAUTOBOT_SSOT_ENABLE_BOOTSTRAP="False"
+NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH=develop +NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE=file # or git + +MERAKI_ORG_ID='123456' +MERAKI_TOKEN='vtx01710aa0fn452740055y1hs60ns8c107ho168' diff --git a/development/development.env b/development/development.env index 39ffc1aeb..04f1bbfb3 100644 --- a/development/development.env +++ b/development/development.env @@ -7,6 +7,7 @@ NAUTOBOT_BANNER_TOP="Local" NAUTOBOT_CHANGELOG_RETENTION=0 NAUTOBOT_DEBUG=True +NAUTOBOT_LOG_DEPRECATION_WARNINGS=True NAUTOBOT_LOG_LEVEL=DEBUG NAUTOBOT_METRICS_ENABLED=True NAUTOBOT_NAPALM_TIMEOUT=5 @@ -93,6 +94,8 @@ NAUTOBOT_SSOT_INFOBLOX_USERNAME="changeme" NAUTOBOT_SSOT_INFOBLOX_VERIFY_SSL="True" # NAUTOBOT_SSOT_INFOBLOX_WAPI_VERSION="" +NAUTOBOT_SSOT_ENABLE_MERAKI="False" + NAUTOBOT_SSOT_ENABLE_SERVICENOW="False" SERVICENOW_INSTANCE="" SERVICENOW_USERNAME="" diff --git a/development/nautobot_config.py b/development/nautobot_config.py index 33bb80280..7210ce596 100644 --- a/development/nautobot_config.py +++ b/development/nautobot_config.py @@ -18,12 +18,8 @@ if "debug_toolbar" not in INSTALLED_APPS: # noqa: F405 INSTALLED_APPS.append("debug_toolbar") # noqa: F405 - if ( - "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE - ): # noqa: F405 - MIDDLEWARE.insert( - 0, "debug_toolbar.middleware.DebugToolbarMiddleware" - ) # noqa: F405 + if "debug_toolbar.middleware.DebugToolbarMiddleware" not in MIDDLEWARE: # noqa: F405 + MIDDLEWARE.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware") # noqa: F405 # # Misc. settings @@ -55,9 +51,7 @@ "NAUTOBOT_DB_PORT", default_db_settings[nautobot_db_engine]["NAUTOBOT_DB_PORT"], ), # Database port, default to postgres - "CONN_MAX_AGE": int( - os.getenv("NAUTOBOT_DB_TIMEOUT", "300") - ), # Database timeout + "CONN_MAX_AGE": int(os.getenv("NAUTOBOT_DB_TIMEOUT", "300")), # Database timeout "ENGINE": nautobot_db_engine, } } @@ -192,13 +186,48 @@ "atl01": "Atlanta", }, "aristacv_verify": is_truthy(os.getenv("NAUTOBOT_ARISTACV_VERIFY", "true")), + "bootstrap_nautobot_environment_branch": os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH", "develop"), + "bootstrap_models_to_sync": { + "secret": True, + "secrets_group": True, + "git_repository": True, + "dynamic_group": True, + "computed_field": True, + "tag": True, + "graph_ql_query": True, + "software": False, + "software_image": False, + "validated_software": False, + "tenant_group": True, + "tenant": True, + "role": True, + "manufacturer": True, + "platform": True, + "location_type": True, + "location": True, + "team": True, + "contact": True, + "provider": True, + "provider_network": True, + "circuit_type": True, + "circuit": True, + "circuit_termination": True, + "namespace": True, + "rir": True, + "vlan_group": True, + "vlan": True, + "vrf": True, + "prefix": True, + }, "enable_aci": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ACI")), "enable_aristacv": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ARISTACV")), + "enable_bootstrap": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_BOOTSTRAP", "false")), "enable_device42": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_DEVICE42")), "enable_dna_center": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_DNA_CENTER")), "enable_infoblox": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_INFOBLOX")), "enable_ipfabric": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_IPFABRIC")), "enable_itential": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_ITENTIAL")), + "enable_meraki": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_MERAKI")), "enable_servicenow": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_SERVICENOW")), 
"hide_example_jobs": is_truthy(os.getenv("NAUTOBOT_SSOT_HIDE_EXAMPLE_JOBS")), "device42_defaults": { diff --git a/development/towncrier_template.j2 b/development/towncrier_template.j2 index 14c71b020..b90585732 100644 --- a/development/towncrier_template.j2 +++ b/development/towncrier_template.j2 @@ -1,4 +1,15 @@ +# v{{ versiondata.version.split(".")[:2] | join(".") }} Release Notes + +This document describes all new features and changes in the release. The format is based on [Keep a +Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic +Versioning](https://semver.org/spec/v2.0.0.html). + +## Release Overview + +- Major features or milestones +- Changes to compatibility with Nautobot and/or other apps, libraries etc. + {% if render_title %} ## [v{{ versiondata.version }} ({{ versiondata.date }})](https://github.com/nautobot/nautobot-app-ssot/releases/tag/v{{ versiondata.version}}) @@ -12,7 +23,11 @@ {% if definitions[category]['showcontent'] %} {% for text, values in sections[section][category].items() %} {% for item in text.split('\n') %} +{% if values %} - {{ values|join(', ') }} - {{ item.strip() }} +{% else %} +- {{ item.strip() }} +{% endif %} {% endfor %} {% endfor %} diff --git a/docs/admin/compatibility_matrix.md b/docs/admin/compatibility_matrix.md index 40b136855..cce8d22ba 100644 --- a/docs/admin/compatibility_matrix.md +++ b/docs/admin/compatibility_matrix.md @@ -28,3 +28,4 @@ While that last supported version will not be strictly enforced--via the max_ver | 3.0.0 | 2.1.0 | 2.99.99 | | 3.0.1 | 2.1.0 | 2.99.99 | | 3.1.0 | 2.1.0 | 2.99.99 | +| 3.2.0 | 2.1.0 | 2.99.99 | diff --git a/docs/admin/install.md b/docs/admin/install.md index 958362024..d520a47d0 100644 --- a/docs/admin/install.md +++ b/docs/admin/install.md @@ -91,8 +91,11 @@ Set up each integration using the specific guides: - [Cisco ACI](./integrations/aci_setup.md) - [Arista CloudVision](./integrations/aristacv_setup.md) +- [Bootstrap](./integrations/bootstrap_setup.md) - [Device42](./integrations/device42_setup.md) - [Cisco DNA Center](./integrations/dna_center_setup.md) - [Infoblox](./integrations/infoblox_setup.md) - [IPFabric](./integrations/ipfabric_setup.md) +- [Itential](./integrations/itential_setup.md) +- [Cisco Meraki](./integrations/meraki_setup.md) - [ServiceNow](./integrations/servicenow_setup.md) diff --git a/docs/admin/integrations/aci_setup.md b/docs/admin/integrations/aci_setup.md index 9a8cbeb56..2d84e8339 100644 --- a/docs/admin/integrations/aci_setup.md +++ b/docs/admin/integrations/aci_setup.md @@ -56,7 +56,7 @@ PLUGINS_CONFIG = { All APIC specific settings have been updated to use the Controller and related ExternalIntegration objects. The ExternalIntegration object that is assigned to the Controller will define the APIC base URL, user credentials, and SSL verification. It will also have a `tenant_prefix` key in the `extra_config` section of the ExternalIntegration to define the Tenant prefix. -The `aci_apics` setting from the `nautobot_config.py` file is no longer used and any configuration found for it will be automatically migrated into a Controller and an ExternalIntegration object. +The `aci_apics` setting from the `nautobot_config.py` file is no longer used. Any configuration found for the APICs that were defined in `aci_apics` will need to be manually input into the Nautobot UI to create the required ExternalIntegration and Controller objects. 
## Nautobot Objects Affected by Settings @@ -82,14 +82,19 @@ There are example YAML files for a few common switch models in `nautobot_ssot/in When upgrading from `nautobot-plugin-ssot-aci` app, it's necessary to [avoid conflicts](../upgrade.md#potential-apps-conflicts). - Uninstall the old app: + ```shell pip uninstall nautobot-plugin-ssot-aci ``` + - Upgrade the app with required extras: + ```shell pip install --upgrade nautobot-ssot[aci] ``` + - Fix `nautobot_config.py` by removing `nautobot_ssot_aci` from `PLUGINS` and merging app configuration into `nautobot_ssot`: + ```python PLUGINS = [ "nautobot_ssot", diff --git a/docs/admin/integrations/bootstrap_setup.md b/docs/admin/integrations/bootstrap_setup.md new file mode 100644 index 000000000..eea250ae6 --- /dev/null +++ b/docs/admin/integrations/bootstrap_setup.md @@ -0,0 +1,88 @@ +# Bootstrap + + +## Description + +This App will sync data from YAML files into Nautobot to create baseline environments. Most items will receive a custom field associated with them called "System of Record", which will be set to "Bootstrap". These items are then the only ones managed by the Bootstrap SSoT App. Other items within the Nautobot instance will not be affected unless there are items with overlapping names. There are currently two exceptions to this: the ComputedField and GraphQLQuery models, since they can't have a custom field associated. If you choose to manage ComputedField or GraphQLQuery objects with the Bootstrap SSoT App, make sure to define them all within the YAML file, since any "locally defined" Computed Fields and GraphQL Queries within Nautobot will end up getting deleted when the job runs. If an item exists in Nautobot by its identifiers but it does not have the "System of Record" custom field on it, the item will be updated with "Bootstrap" (or the `SYSTEM_OF_RECORD` environment variable value) when the App runs. This way no duplicates are created, and the App will not delete any items that are not defined in the Bootstrap data but were manually created in Nautobot. + +## Installation + +Before configuring the integration, please ensure that the `nautobot-ssot` app was [installed with Bootstrap integration extra dependencies](../install.md#install-guide). + +```shell +pip install nautobot-ssot[bootstrap] +``` + + +### nautobot_config.py + +The settings here are pretty straightforward: `nautobot_environment_branch` will be loaded from the environment variable `NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH`, or default to `develop`. The rest of the settings define which models/objects you want to have the App sync to Nautobot. There are a couple of caveats to these. For example, for DynamicGroup objects to sync, the filter criteria need to already exist in Nautobot. So, if you are going to have groups that are filtered on platforms/regions/sites/etc., make sure not to include DynamicGroup objects in the "models_to_sync" until those items exist. Same for Git Repositories when you want to sync Golden Config-related repositories. The Golden Config App needs to be installed for the `provided_contents` items to be found. This also goes for the Lifecycle Management app with `Software/ValidatedSoftware` models. + +```python +PLUGINS = ["nautobot_ssot"] + +PLUGINS_CONFIG = { + "nautobot_ssot": { + # Other nautobot_ssot settings omitted.
+ "bootstrap_nautobot_environment_branch": os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH", "develop"), + "bootstrap_models_to_sync": { + "secret": True, + "secrets_group": True, + "git_repository": True, + "dynamic_group": True, + "computed_field": True, + "tag": True, + "graph_ql_query": True, + "software": False, + "software_image": False, + "validated_software": False, + "tenant_group": True, + "tenant": True, + "role": True, + "manufacturer": True, + "platform": True, + "location_type": True, + "location": True, + "team": True, + "contact": True, + "provider": True, + "provider_network": True, + "circuit_type": True, + "circuit": True, + "circuit_termination": True, + "namespace": True, + "rir": True, + "vlan_group": True, + "vlan": True, + "vrf": True, + "prefix": True, + }, + "enable_bootstrap": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_BOOTSTRAP", "false")), + } +} +``` + +## Configuration + +### Bootstrap data + +Bootstrap data can be stored in 2 fashions. + +1. (Recommended) Bootstrap data can be stored in a Git Repository and referenced in the app as a Git Datasource. A user should create a Git Repository in Nautobot (including any necessary Secrets and SecretsGroups for access) with the word "Bootstrap" in the name, and with a provided content type of `config contexts`. This is how the App will locate the correct repository. The data structure is flat files, and there is a naming scheme to these files. The first one required is `global_settings.yml`. This contains the main data structures of what data can be loaded `Secrets,SecretsGroups,GitRepository,DynamicGroup,Tag,etc`. You can then create additional `.yml` files with naming of your CI environments, i.e. production, development, etc for default values for specific items. This is where the environment variables described below would be matched to pull in additional data from the other YAML files defined in the directory. + +2. Bootstrap data can be stored within the `nautobot_ssot/bootstrap/fixtures` directory. Using local files is not recommended as this requires a fork of the plugin and locally editing the YAML data files in the fixtures folder. + +A simple structure would look something like this: + +```text +global_settings.yml +develop.yml +prod.yml +staging.yml +``` + +There are 2 environment variables that control how certain things are loaded in the app. + + 1. `NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE` - defines whether to load from the local `fixtures` folder or a GitRepository already present in Nautobot. This setting will get overridden if the user selects something other than `env_var` in the job's GUI settings. + - Acceptable options are `file` or `git`. + 2. `NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH` - Defines the environment and settings you want to import. I.e. production, develop, staging. diff --git a/docs/admin/integrations/dna_center_setup.md b/docs/admin/integrations/dna_center_setup.md index 6dc18d058..c3de6eb9b 100644 --- a/docs/admin/integrations/dna_center_setup.md +++ b/docs/admin/integrations/dna_center_setup.md @@ -12,7 +12,7 @@ pip install nautobot-ssot[dna_center] ## Configuration -Connecting to a DNA Center instance is handled through the Nautobot [Controller](https://docs.nautobot.com/projects/core/en/stable/development/core/controllers/) object. 
There is an expectation that you will create an [ExternalIntegration](https://docs.nautobot.com/projects/core/en/stable/user-guide/platform-functionality/externalintegration/) with the requisite connection information for your DNA Center instance attached to that Controller object. All imported Devices will be associated to a [ControllerManagedDeviceGroup](https://docs.nautobot.com/projects/core/en/stable/user-guide/core-data-model/dcim/controllermanageddevicegroup/) that is found or created during each Job run. It will update the group name to be "\ Managed Devices" if it exists. When running the Sync Job you will specify which DNA Center Controller instance you wish to synchronize with. Other behaviors for the integration can be controlled with the following settings: +Connecting to a DNA Center instance is handled through the Nautobot [Controller](https://docs.nautobot.com/projects/core/en/stable/development/core/controllers/) object. There is an expectation that you will create an [ExternalIntegration](https://docs.nautobot.com/projects/core/en/stable/user-guide/platform-functionality/externalintegration/) with the requisite connection information for your DNA Center instance attached to that Controller object. All imported Devices will be associated to a [ControllerManagedDeviceGroup](https://docs.nautobot.com/projects/core/en/stable/user-guide/core-data-model/dcim/controllermanageddevicegroup/) that is found or created during each Job run. It will update the group name to be "\<Controller Name\> Managed Devices" if it exists. When running the Sync Job you will specify which DNA Center Controller instance you wish to synchronize with. Other behaviors for the integration can be controlled with the following settings: | Configuration Variable | Type | Usage | Default | | --------------------------------------------------- | ------- | ---------------------------------------------------------- | -------------------- | diff --git a/docs/admin/integrations/index.md b/docs/admin/integrations/index.md index 1f28b95e8..e8c23e277 100644 --- a/docs/admin/integrations/index.md +++ b/docs/admin/integrations/index.md @@ -3,9 +3,12 @@ This Nautobot app supports the following integrations: - [Cisco ACI](./aci_setup.md) +- [Bootstrap](./bootstrap_setup.md) - [Arista CloudVision](./aristacv_setup.md) - [Device42](./device42_setup.md) +- [Cisco DNA Center](./dna_center_setup.md) - [Infoblox](./infoblox_setup.md) - [IPFabric](./ipfabric_setup.md) - [Itential](./itential_setup.md) +- [Cisco Meraki](./meraki_setup.md) - [ServiceNow](./servicenow_setup.md) diff --git a/docs/admin/integrations/meraki_setup.md b/docs/admin/integrations/meraki_setup.md new file mode 100644 index 000000000..d08f48fd8 --- /dev/null +++ b/docs/admin/integrations/meraki_setup.md @@ -0,0 +1,25 @@ +# Cisco Meraki Integration Setup + +This guide will walk you through the steps to set up the Cisco Meraki integration with the `nautobot_ssot` app. + +## Prerequisites + +Before configuring the integration, please ensure that the `nautobot-ssot` app was [installed with Cisco Meraki integration extra dependencies](../install.md#install-guide). + +```shell +pip install nautobot-ssot[meraki] +``` + +## Configuration + +Connecting to a Meraki instance is handled through the Nautobot [Controller](https://docs.nautobot.com/projects/core/en/stable/development/core/controllers/) object.
There is an expectation that you will create an [ExternalIntegration](https://docs.nautobot.com/projects/core/en/stable/user-guide/platform-functionality/externalintegration/) with the requisite connection information for your Meraki dashboard attached to that Controller object. All imported Devices will be associated to a [ControllerManagedDeviceGroup](https://docs.nautobot.com/projects/core/en/stable/user-guide/core-data-model/dcim/controllermanageddevicegroup/) that is found or created during each Job run. It will update the group name to be "\<Controller Name\> Managed Devices" if it exists. When running the Sync Job you will specify which Meraki Controller instance you wish to synchronize with along with other settings for the synchronization. + +Below is an example snippet from `nautobot_config.py` that demonstrates how to enable the Meraki integration: + +```python +PLUGINS_CONFIG = { + "nautobot_ssot": { + "enable_meraki": is_truthy(os.getenv("NAUTOBOT_SSOT_ENABLE_MERAKI", "true")), + } +} +``` diff --git a/docs/admin/release_notes/version_1.5.md b/docs/admin/release_notes/version_1.5.md index 68f81cf46..736747ea2 100644 --- a/docs/admin/release_notes/version_1.5.md +++ b/docs/admin/release_notes/version_1.5.md @@ -5,4 +5,4 @@ ## Changed - [206](https://github.com/nautobot/nautobot-app-ssot/pull/206) - Update docs pins for py3.7 compatibility by @cmsirbu -- [207][https://github.com/nautobot/nautobot-app-ssot/pull/207] Drop Python 3.7 Support by @jdrew82 +- [207](https://github.com/nautobot/nautobot-app-ssot/pull/207) Drop Python 3.7 Support by @jdrew82 diff --git a/docs/admin/release_notes/version_3.2.md b/docs/admin/release_notes/version_3.2.md new file mode 100644 index 000000000..3fd9e98ae --- /dev/null +++ b/docs/admin/release_notes/version_3.2.md @@ -0,0 +1,48 @@ + +# v3.2 Release Notes + +This document describes all new features and changes in the release. The format is based on [Keep a +Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic +Versioning](https://semver.org/spec/v2.0.0.html). + +## Release Overview + +- There have been two new integrations added to the project! + + 1. Bootstrap SSoT: The Bootstrap integration allows users to quickly and consistently set up Nautobot environments with base objects like Locations, LocationTypes, Tenants, VLANs and more. This integration, when linked to a Git repository with the requisite data, will synchronize the provided objects, represented in YAML, into Nautobot. Using this integration, users can update multiple Nautobot instances with the same data, or easily test and promote changes through a pipeline. Users can also use Bootstrap to spin up local development environments with the base information needed to create test devices to develop new apps for Nautobot. + + 2. Cisco Meraki SSoT: The Cisco Meraki integration allows users to import Networks, Devices, Ports, Prefixes, and IP Addresses from the Meraki Dashboard. Refer to the integration documentation for a full explanation of all capabilities and options for the integration. + +- The DNA Center and Device42 integrations have been updated to allow specifying the LocationType for imported Location objects.
+ +## [v3.2.0 (2024-10-21)](https://github.com/nautobot/nautobot-app-ssot/releases/tag/v3.2.0) + +### Added + +- [#541](https://github.com/nautobot/nautobot-app-ssot/issues/541) - Add Bootstrap SSoT to Nautobot SSoT Nautobot application +- [#546](https://github.com/nautobot/nautobot-app-ssot/issues/546) - Added support for specifying LocationType for Areas, Buildings, and Floors in DNA Center integration. +- [#546](https://github.com/nautobot/nautobot-app-ssot/issues/546) - Added support for specifying LocationType for Buildings in Device42 integration. +- [#574](https://github.com/nautobot/nautobot-app-ssot/issues/574) - Added integration with Cisco Meraki. + +### Changed + +- [#574](https://github.com/nautobot/nautobot-app-ssot/issues/574) - Updated DNA Center Job to use SSoT verify_controller_managed_device_group utility function so code is more DRY. + +### Fixed + +- [#479](https://github.com/nautobot/nautobot-app-ssot/issues/479) - Corrected the attribute used to reference the ControllerManagedDeviceGroup off a Controller object. +- [#548](https://github.com/nautobot/nautobot-app-ssot/issues/548) - Fixed SSoT jobs not respecting DryRun variable. +- [#558](https://github.com/nautobot/nautobot-app-ssot/issues/558) - Fixed VRF attribute for Prefix create() to be ids instead of attrs. +- [#561](https://github.com/nautobot/nautobot-app-ssot/issues/561) - Bug in IP Fabric that causes some network columns to return host bits set; changed `ip_network` to use `strict=False`. +- [#571](https://github.com/nautobot/nautobot-app-ssot/issues/571) - Fixed requests call that was missing URL scheme. +- [#574](https://github.com/nautobot/nautobot-app-ssot/issues/574) - Fixed the ACI integration's retrieval of Controller Managed Device Group name that was breaking ACI adapter. + +### Documentation + +- [#568](https://github.com/nautobot/nautobot-app-ssot/issues/568) - Changed documentation to include passing job in the example of loading Adapters. +- [#541](https://github.com/nautobot/nautobot-app-ssot/issues/541) - Fixed documentation errors with 1.5 release notes and missing links to integration setup and user sections. +- [#542](https://github.com/nautobot/nautobot-app-ssot/issues/542) - Correct documentation for ACI integration and add missing DNA Center installation documentation. +- [#546](https://github.com/nautobot/nautobot-app-ssot/issues/546) - Added documentation on how to use DNA Center integration along with screenshots of the steps. +- [#546](https://github.com/nautobot/nautobot-app-ssot/issues/546) - Updated documentation for Device42 integration and updated Job form screenshot to update for Building LocationType Job form change. +- [#569](https://github.com/nautobot/nautobot-app-ssot/issues/569) - Add missing links for integrations to Integrations Configuration portion of Install and Configure section. +- [#574](https://github.com/nautobot/nautobot-app-ssot/issues/574) - Added documentation for Meraki integration. diff --git a/docs/dev/contributing.md b/docs/dev/contributing.md index a77574e79..c644b2c8f 100644 --- a/docs/dev/contributing.md +++ b/docs/dev/contributing.md @@ -49,24 +49,14 @@ The branching policy includes the following tenets: Single Source of Truth will observe semantic versioning, as of 1.0. This may result in a quick turnaround in minor versions to keep pace with an ever-growing feature set. 
+### Backporting to Older Releases + +If you are backporting any fixes to a prior major or minor version of this app, please open an issue, comment on an existing issue, or post in the [Network to Code Slack](https://networktocode.slack.com/) (channel `#nautobot`). + +We will create a `release-X.Y` branch for you to open your PR against and cut a new release once the PR is successfully merged. + ## Release Policy Single Source of Truth has currently no intended scheduled release schedule, and will release new features in minor versions. -When a new release, from `develop` to `main`, is created the following should happen. - -- A release PR is created from `develop` with: - - Update the release notes in `docs/admin/release_notes/version_..md` file to reflect the changes. - - Change the version from `..-beta` to `..` in `pyproject.toml`. - - Set the PR to the `main` branch. -- Ensure the tests for the PR pass. -- Merge the PR. -- Create a new tag: - - The tag should be in the form of `v..`. - - The title should be in the form of `v..`. - - The description should be the changes that were added to the `version_..md` document. -- If merged into `main`, then push from `main` to `develop`, in order to retain the merge commit created when the PR was merged -- A post release PR is created with: - - Change the version from `..` to `..-beta` in both `pyproject.toml` and `nautobot.__init__.__version__`. - - Set the PR to the proper branch, `develop`. - - Once tests pass, merge. +The steps taken by maintainers when creating a new release are documented in the [release checklist](./release_checklist.md). diff --git a/docs/dev/jobs.md b/docs/dev/jobs.md index af945b464..17deadab1 100644 --- a/docs/dev/jobs.md +++ b/docs/dev/jobs.md @@ -56,9 +56,10 @@ class MySSoTRemoteAdapter(Adapter): vlan = VLANModel top_level = ("vlan",) - def __init__(self, *args, api_client, **kwargs): + def __init__(self, *args, api_client, job=None, **kwargs): super().__init__(*args, **kwargs) self.api_client = api_client + self.job = job def load(self): for vlan in self.api_client.get_vlans(): @@ -72,11 +73,11 @@ class ExampleDataSource(DataSource, Job): name = "Example Data Source" def load_source_adapter(self): - self.source_adapter = MySSoTRemoteAdapter(api_client=APIClient()) + self.source_adapter = MySSoTRemoteAdapter(api_client=APIClient(), job=self) self.source_adapter.load() def load_target_adapter(self): - self.target_adapter = MySSoTNautobotAdapter() + self.target_adapter = MySSoTNautobotAdapter(job=self) self.target_adapter.load() jobs = [ExampleDataSource] @@ -177,4 +178,4 @@ If you need to perform the `create`, `update` and `delete` operations on the rem You still want your models to adhere to the [modeling guide](../user/modeling.md), since it provides you the auto-generated `load` function for the diffsync adapter on the Nautobot side. !!! warning - Special care should be taken when synchronizing new Devices with children Interfaces into a Nautobot instance that also defines Device Types with Interface components of the same name. When the new Device is created in Nautobot, its Interfaces will also be created as defined in the respective Device Type. As a result, when SSoT will attempt to create the children Interfaces loaded by the remote adapter, these will already exist in the target Nautobot system. In this scenario, if not properly handled, the sync will fail! 
Possible remediation steps may vary depending on the specific use-case, therefore this is left as an exercise to the reader/developer to solve for their specific context. \ No newline at end of file + Special care should be taken when synchronizing new Devices with children Interfaces into a Nautobot instance that also defines Device Types with Interface components of the same name. When the new Device is created in Nautobot, its Interfaces will also be created as defined in the respective Device Type. As a result, when SSoT attempts to create the children Interfaces loaded by the remote adapter, these will already exist in the target Nautobot system. In this scenario, if not properly handled, the sync will fail! Possible remediation steps may vary depending on the specific use-case, therefore this is left as an exercise to the reader/developer to solve for their specific context. diff --git a/docs/dev/release_checklist.md b/docs/dev/release_checklist.md new file mode 100644 index 000000000..558ec0334 --- /dev/null +++ b/docs/dev/release_checklist.md @@ -0,0 +1,214 @@ +# Release Checklist + +This document is intended for app maintainers and outlines the steps to perform when releasing a new version of the app. + +!!! important + Before starting, make sure your **local** `develop`, `main`, and (if applicable) the current LTM branch are all up to date with upstream! + + ``` + git fetch + git switch develop && git pull # and repeat for main/ltm + ``` + +Choose your own adventure: + +- LTM release? Jump [here](#ltm-releases). +- Patch release from `develop`? Jump [here](#all-releases-from-develop). +- Minor release? Continue with [Minor Version Bumps](#minor-version-bumps) and then [All Releases from `develop`](#all-releases-from-develop). + +## Minor Version Bumps + +### Update Requirements + +Every minor version release should refresh `poetry.lock`, so that it lists the most recent stable release of each package. To do this: + +0. Run `poetry update --dry-run` to have Poetry automatically tell you what package updates are available and the versions it would upgrade to. This requires an existing environment created from the lock file (i.e. via `poetry install`). +1. Review each requirement's release notes for any breaking or otherwise noteworthy changes. +2. Run `poetry update <package>` to update the package versions in `poetry.lock` as appropriate. +3. If a required package requires updating to a new release not covered in the version constraints for a package as defined in `pyproject.toml` (e.g. `Django ~3.1.7` would never install `Django >=4.0.0`), update it manually in `pyproject.toml`. +4. Run `poetry install` to install the refreshed versions of all required packages. +5. Run all tests (`poetry run invoke tests`) and check that the UI and API function as expected. + +### Update Documentation + +If there are any changes to the compatibility matrix (such as a bump in the minimum supported Nautobot version), update it accordingly. + +Commit any resulting changes from the following sections to the documentation before proceeding with the release. + +!!! tip + Fire up the documentation server in your development environment with `poetry run mkdocs serve`! This allows you to view the documentation site locally (the link is in the output of the command) and automatically rebuilds it as you make changes. + +### Verify the Installation and Upgrade Steps + +Follow the [installation instructions](../admin/install.md) to perform a new production installation of the app.
If possible, also test the [upgrade process](../admin/upgrade.md) from the previous released version. + +The goal of this step is to walk through the entire install process *as documented* to make sure nothing there needs to be changed or updated, to catch any errors or omissions in the documentation, and to ensure that it is current with each release. + +--- + +## All Releases from `develop` + +### Verify CI Build Status + +Ensure that continuous integration testing on the `develop` branch is completing successfully. + +### Bump the Version + +Update the package version using `poetry version` if necessary. This command shows the current version of the project or bumps the version of the project and writes the new version back to `pyproject.toml` if a valid bump rule is provided. + +The new version must be a valid semver string or a valid bump rule: `patch`, `minor`, `major`, `prepatch`, `preminor`, `premajor`, `prerelease`. Always try to use a bump rule when you can. + +Display the current version with no arguments: + +```no-highlight +> poetry version +nautobot-ssot 1.0.0-beta.2 +``` + +Bump pre-release versions using `prerelease`: + +```no-highlight +> poetry version prerelease +Bumping version from 1.0.0-beta.2 to 1.0.0-beta.3 +``` + +For major versions, use `major`: + +```no-highlight +> poetry version major +Bumping version from 1.0.0-beta.2 to 1.0.0 +``` + +For minor versions, use `minor`: + +```no-highlight +> poetry version minor +Bumping version from 1.0.0 to 1.1.0 +``` + +And lastly, for patch versions, you guessed it, use `patch`: + +```no-highlight +> poetry version patch +Bumping version from 1.1.0 to 1.1.1 +``` + +Please see the [official Poetry documentation on `version`](https://python-poetry.org/docs/cli/#version) for more information. + +### Update the Changelog + +!!! important + The changelog must adhere to the [Keep a Changelog](https://keepachangelog.com/) style guide. + +This guide uses `1.4.2` as the new version in its examples, so change it to match the version you bumped to in the previous step! Every. single. time. you. copy/paste commands :) + +First, create a release branch off of `develop` (`git switch -c release-1.4.2 develop`). + +> You will need to have the project's poetry environment built at this stage, as the towncrier command runs **locally only**. If you don't have it, run `poetry install` first. + +Generate release notes with `invoke generate-release-notes --version 1.4.2` and answer `yes` to the prompt `Is it okay if I remove those files? [Y/n]:`. This will update the release notes in `docs/admin/release_notes/version_X.Y.md`, stage that file in git, and `git rm` all the fragments that have now been incorporated into the release notes. + +There are two possibilities: + +1. If you're releasing a new major or minor version, rename the `version_X.Y.md` file accordingly (e.g. rename to `docs/admin/release_notes/version_1.4.md`). Update the `Release Overview` and add this new page to the table of contents within `mkdocs.yml`. +2. If you're releasing a patch version, copy your version's section from the `version_X.Y.md` file into the already existing `docs/admin/release_notes/version_1.4.md` file. Delete the `version_X.Y.md` file. + +Stage all the changes (`git add`) and check the diffs to verify all of the changes are correct (`git diff --cached`). + +Commit `git commit -m "Release v1.4.2"` and `git push` the staged changes. + +### Submit Release Pull Request + +Submit a pull request titled `Release v1.4.2` to merge your release branch into `main`.
Copy the documented release notes into the pull request's body. + +!!! important + Do not squash merge this branch into `main`. Make sure to select `Create a merge commit` when merging in GitHub. + +Once CI has completed on the PR, merge it. + +### Create a New Release in GitHub + +Draft a [new release](https://github.com/nautobot/nautobot-app-ssot/releases/new) with the following parameters. + +* **Tag:** Input current version (e.g. `v1.4.2`) and select `Create new tag: v1.4.2 on publish` +* **Target:** `main` +* **Title:** Version and date (e.g. `v1.4.2 - 2024-04-02`) + +Click "Generate Release Notes" and edit the auto-generated content as follows: + +- Change the entries generated by GitHub to only the usernames of the contributors. e.g. `* Updated dockerfile by @nautobot_user in https://github.com/nautobot/nautobot-app-ssot/pull/123` -> `* @nautobot_user`. + - This should give you the list for the new `Contributors` section. + - Make sure there are no duplicated entries. +- Replace the content of the `What's Changed` section with the description of changes from the release PR (what towncrier generated). +- If it exists, leave the `New Contributors` list as it is. + +The release notes should look as follows: + +```markdown +## What's Changed + +**Towncrier generated Changed/Fixed/Housekeeping etc. sections here** + +## Contributors + +* @alice +* @bob + +## New Contributors + +* @bob + +**Full Changelog**: https://github.com/nautobot/nautobot-app-ssot/compare/v1.4.1...v1.4.2 +``` + +Publish the release! + +### Create a PR from `main` back to `develop` + +First, sync your `main` branch with upstream changes: `git switch main && git pull`. + +Create a new branch from `main` called `release-1.4.2-to-develop` and use `poetry version prepatch` to bump the development version to the next release. + +For example, if you just released `v1.4.2`: + +```no-highlight +> git switch -c release-1.4.2-to-develop main +Switched to a new branch 'release-1.4.2-to-develop' + +> poetry version prepatch +Bumping version from 1.4.2 to 1.4.3a1 + +> git add pyproject.toml && git commit -m "Bump version" + +> git push +``` + +!!! important + Do not squash merge this branch into `develop`. Make sure to select `Create a merge commit` when merging in GitHub. + +Open a new PR from `release-1.4.2-to-develop` against `develop`, wait for CI to pass, and merge it. + +### Final checks + +At this stage, the CI should be running or finished for the `v1.4.2` tag and a package successfully published to PyPI and added into the GitHub Release. Double check that's the case. + +Documentation should also have been built for the tag on ReadTheDocs and if you're reading this page online, refresh it and look for the new version in the little version fly-out menu down at the bottom right of the page. + +All done! + + +## LTM Releases + +For projects maintaining a Nautobot LTM compatible release, all development and release management is done through the `ltm-x.y` branch. The `x.y` relates to the LTM version of Nautobot it's compatible with, for example `1.6`. + +The process is similar to releasing from `develop`, but there is no need for post-release branch syncing because you'll release directly from the LTM branch: + +1. Make sure your `ltm-1.6` branch is passing CI. +2. Create a release branch from the `ltm-1.6` branch: `git switch -c release-1.2.3 ltm-1.6`. +3. Bump up the patch version `poetry version patch`. If you're backporting a feature instead of bugfixes, bump the minor version instead with `poetry version minor`. +4. 
Generate the release notes: `invoke generate-release-notes --version 1.2.3`. +5. Move the release notes from the generated `docs/admin/release_notes/version_X.Y.md` to `docs/admin/release_notes/version_1.2.md`. +6. Add all the changes and `git commit -m "Release v1.2.3"`, then `git push`. +7. Open a new PR against `ltm-1.6`. Once CI is passing in the PR, `Create a merge commit` (don't squash!). +8. Create a New Release in GitHub - use the same steps documented [here](#create-a-new-release-in-github). +9. Open a separate PR against `develop` to synchronize all LTM release changelogs into the latest version of the docs for visibility. diff --git a/docs/images/device42_job-form.png b/docs/images/device42_job-form.png index 2da6f005c..d53cba9a9 100644 Binary files a/docs/images/device42_job-form.png and b/docs/images/device42_job-form.png differ diff --git a/docs/images/dnac_controller.png b/docs/images/dnac_controller.png new file mode 100644 index 000000000..f3797e2d3 Binary files /dev/null and b/docs/images/dnac_controller.png differ diff --git a/docs/images/dnac_dashboard.png b/docs/images/dnac_dashboard.png new file mode 100644 index 000000000..1a97d9d6b Binary files /dev/null and b/docs/images/dnac_dashboard.png differ diff --git a/docs/images/dnac_detail-view.png b/docs/images/dnac_detail-view.png new file mode 100644 index 000000000..28b8e6bae Binary files /dev/null and b/docs/images/dnac_detail-view.png differ diff --git a/docs/images/dnac_enabled_job.png b/docs/images/dnac_enabled_job.png new file mode 100644 index 000000000..d038bee8f Binary files /dev/null and b/docs/images/dnac_enabled_job.png differ diff --git a/docs/images/dnac_external_integration.png b/docs/images/dnac_external_integration.png new file mode 100644 index 000000000..a58956d4c Binary files /dev/null and b/docs/images/dnac_external_integration.png differ diff --git a/docs/images/dnac_external_integration_adv.png b/docs/images/dnac_external_integration_adv.png new file mode 100644 index 000000000..3e2752fd5 Binary files /dev/null and b/docs/images/dnac_external_integration_adv.png differ diff --git a/docs/images/dnac_job_form.png b/docs/images/dnac_job_form.png new file mode 100644 index 000000000..33275af38 Binary files /dev/null and b/docs/images/dnac_job_form.png differ diff --git a/docs/images/dnac_job_list.png b/docs/images/dnac_job_list.png new file mode 100644 index 000000000..e24d9d885 Binary files /dev/null and b/docs/images/dnac_job_list.png differ diff --git a/docs/images/dnac_job_settings.png b/docs/images/dnac_job_settings.png new file mode 100644 index 000000000..2615b8ff9 Binary files /dev/null and b/docs/images/dnac_job_settings.png differ diff --git a/docs/images/dnac_jobresult.png b/docs/images/dnac_jobresult.png new file mode 100644 index 000000000..b4931debe Binary files /dev/null and b/docs/images/dnac_jobresult.png differ diff --git a/docs/images/dnac_password_secret.png b/docs/images/dnac_password_secret.png new file mode 100644 index 000000000..8f0900157 Binary files /dev/null and b/docs/images/dnac_password_secret.png differ diff --git a/docs/images/dnac_secretsgroup.png b/docs/images/dnac_secretsgroup.png new file mode 100644 index 000000000..565ea23f1 Binary files /dev/null and b/docs/images/dnac_secretsgroup.png differ diff --git a/docs/images/dnac_ssot-sync-details.png b/docs/images/dnac_ssot-sync-details.png new file mode 100644 index 000000000..33be053e7 Binary files /dev/null and b/docs/images/dnac_ssot-sync-details.png differ diff --git a/docs/images/dnac_username_secret.png 
b/docs/images/dnac_username_secret.png new file mode 100644 index 000000000..29ed1d3fc Binary files /dev/null and b/docs/images/dnac_username_secret.png differ diff --git a/docs/images/meraki_controller.png b/docs/images/meraki_controller.png new file mode 100644 index 000000000..8e7a7c22b Binary files /dev/null and b/docs/images/meraki_controller.png differ diff --git a/docs/images/meraki_dashboard.png b/docs/images/meraki_dashboard.png new file mode 100644 index 000000000..fe2cb5284 Binary files /dev/null and b/docs/images/meraki_dashboard.png differ diff --git a/docs/images/meraki_detail-view.png b/docs/images/meraki_detail-view.png new file mode 100644 index 000000000..91ae1972d Binary files /dev/null and b/docs/images/meraki_detail-view.png differ diff --git a/docs/images/meraki_enabled_job.png b/docs/images/meraki_enabled_job.png new file mode 100644 index 000000000..ae68c09b1 Binary files /dev/null and b/docs/images/meraki_enabled_job.png differ diff --git a/docs/images/meraki_external_integration.png b/docs/images/meraki_external_integration.png new file mode 100644 index 000000000..65bcbb0db Binary files /dev/null and b/docs/images/meraki_external_integration.png differ diff --git a/docs/images/meraki_job_form.png b/docs/images/meraki_job_form.png new file mode 100644 index 000000000..92a952dcd Binary files /dev/null and b/docs/images/meraki_job_form.png differ diff --git a/docs/images/meraki_job_list.png b/docs/images/meraki_job_list.png new file mode 100644 index 000000000..7fbdc215f Binary files /dev/null and b/docs/images/meraki_job_list.png differ diff --git a/docs/images/meraki_job_settings.png b/docs/images/meraki_job_settings.png new file mode 100644 index 000000000..619436b0f Binary files /dev/null and b/docs/images/meraki_job_settings.png differ diff --git a/docs/images/meraki_jobresult.png b/docs/images/meraki_jobresult.png new file mode 100644 index 000000000..be7b4b682 Binary files /dev/null and b/docs/images/meraki_jobresult.png differ diff --git a/docs/images/meraki_network_loctype.png b/docs/images/meraki_network_loctype.png new file mode 100644 index 000000000..8aad630b6 Binary files /dev/null and b/docs/images/meraki_network_loctype.png differ diff --git a/docs/images/meraki_org_id_secret.png b/docs/images/meraki_org_id_secret.png new file mode 100644 index 000000000..9ed860099 Binary files /dev/null and b/docs/images/meraki_org_id_secret.png differ diff --git a/docs/images/meraki_secretsgroup.png b/docs/images/meraki_secretsgroup.png new file mode 100644 index 000000000..197c2e4db Binary files /dev/null and b/docs/images/meraki_secretsgroup.png differ diff --git a/docs/images/meraki_token_secret.png b/docs/images/meraki_token_secret.png new file mode 100644 index 000000000..9f7430e98 Binary files /dev/null and b/docs/images/meraki_token_secret.png differ diff --git a/docs/user/integrations/bootstrap.md b/docs/user/integrations/bootstrap.md new file mode 100644 index 000000000..94b89dade --- /dev/null +++ b/docs/user/integrations/bootstrap.md @@ -0,0 +1,735 @@ +## Usage + +## Process + +### Bootstrap as DataSource + +Synchronization of data follows this workflow: +1. Load data from Bootstrap YAML file (limited to `models_to_sync`) +2. Load data from Nautobot (limited to `models_to_sync`, and objects that also have the `CustomField` `system_of_record` set to "Bootstrap".) +3. DiffSync determines Creates, Updates, Deletes +4. 
If an object is being created (an object loaded from Bootstrap was not loaded from Nautobot), Bootstrap will first check to see if an object with the same name exists in Nautobot but does not have the `system_of_record` field set. If it finds an object, it will update it with the Bootstrap values and set the `system_of_record` field to "Bootstrap". +5. If an object needs to be updated, it will be updated with the values provided by Bootstrap data. +6. If an object needs to be deleted, it will be deleted. + + +### Bootstrap as DataTarget + +NotYetImplemented + +### Data structures + +#### global_settings.yml (see `../bootstrap/fixtures/global_settings.yml` for examples of supported models) + +```yaml +secret: + - name: Github_Service_Acct + provider: environment-variable # or text-file + parameters: + variable: GITHUB_SERVICE_ACCT + path: + - name: Github_Service_Token + provider: environment-variable # or text-file + parameters: + variable: GITHUB_SERVICE_TOKEN + path: +secrets_group: + - name: Github_Service_Account + secrets: + - name: Github_Service_Acct + secret_type: username + access_type: HTTP(S) + - name: Github_Service_Token + secret_type: token + access_type: HTTP(S) +git_repository: + - name: "Backbone Config Contexts" + url: "https://github.com/nautobot/backbone-config-contexts.git" + branch: "main" # if branch is defined it will be used instead of the "git_branch" in the "branch" variable file. + secrets_group_name: "Github_Service_Account" + provided_data_type: + - "config contexts" + - name: "Datacenter Config Contexts" + url: "https://github.com/nautobot/datacenter-config-contexts.git" + secrets_group_name: "Github_Service_Account" + provided_data_type: + - "config contexts" +dynamic_group: + - name: Backbone Domain + content_type: dcim.device + filter: | + { + "tenant": [ + "backbone" + ] + } +computed_field: + - label: Compliance Change + content_type: nautobot_golden_config.configcompliance + template: '{{ obj | get_change_log }}' +tag: + - name: Backbone + color: '795548' + content_types: + - dcim.device +graph_ql_query: + - name: "Backbone Devices" + query: | + query ($device_id: ID!) { + device(id: $device_id) { + config_context + hostname: name + device_role { + name + } + tenant { + name + } + primary_ip4 { + address + } + } + } +software: + - device_platform: "arista_eos" + version: "4.25.10M" + alias: + release_date: "2023-12-04" + eos_date: "2023-12-04" + documentation_url: "https://arista.com" # url is currently required due to a bug in the Device Lifecycle Management Plugin https://github.com/nautobot/nautobot-app-device-lifecycle-mgmt/issues/263 + lts: false + pre_release: false + tags: ['Backbone'] +software_image: + - software: arista_eos - 15.4.3 + platform: arista_eos + software_version: 15.4.3 + file_name: arista15.4.3.bin + download_url: https://arista.com + image_file_checksum: + default_image: false + tags: ['Test'] +validated_software: + - software: "arista.eos.eos - 4.25.10M" + valid_since: 2023-08-07 + valid_until: + preferred_version: false + tags: [] +``` + +#### develop.yml + +```yaml +git_branch: develop +``` + +## Content Types + +There are a couple of models like Tags and Git Repositories that have associated content types. These require a specific format when listing them in the YAML file. The format of these is the `app_label`.`model`, though models can somewhat vary from App to App.
Here is a list of some of the most common ones: + +```yaml +- "circuits.circuit" +- "circuits.circuittermination" +- "circuits.provider" +- "circuits.providernetwork" +- "dcim.cable" +- "dcim.consoleport" +- "dcim.consoleserverport" +- "dcim.device" +- "dcim.devicebay" +- "dcim.devicetype" +- "dcim.frontport" +- "dcim.interface" +- "dcim.inventoryitem" +- "dcim.powerfeed" +- "dcim.poweroutlet" +- "dcim.powerpanel" +- "dcim.powerport" +- "dcim.rack" +- "dcim.rackreservation" +- "dcim.rearport" +- "dcim.site" +- "dcim.virtualchassis" +- "extras.gitrepository" +- "extras.job" +- "extras.secret" +- "ipam.aggregate" +- "ipam.ipaddress" +- "ipam.prefix" +- "ipam.routetarget" +- "ipam.service" +- "ipam.vlan" +- "ipam.vrf" +- "nautobot_device_lifecycle_mgmt.contactlcm" +- "nautobot_device_lifecycle_mgmt.contractlcm" +- "nautobot_device_lifecycle_mgmt.cvelcm" +- "nautobot_device_lifecycle_mgmt.devicesoftwarevalidationresult" +- "nautobot_device_lifecycle_mgmt.hardwarelcm" +- "nautobot_device_lifecycle_mgmt.inventoryitemsoftwarevalidationresult" +- "nautobot_device_lifecycle_mgmt.softwareimagelcm" +- "nautobot_device_lifecycle_mgmt.softwarelcm" +- "nautobot_device_lifecycle_mgmt.validatedsoftwarelcm" +- "nautobot_device_lifecycle_mgmt.vulnerabilitylcm" +- "nautobot_golden_config.compliancefeature" +- "nautobot_golden_config.compliancerule" +- "nautobot_golden_config.configcompliance" +- "nautobot_golden_config.configremove" +- "nautobot_golden_config.configreplace" +- "nautobot_golden_config.goldenconfig" +- "nautobot_golden_config.goldenconfigsetting" +- "tenancy.tenant" +- "virtualization.cluster" +- "virtualization.virtualmachine" +- "virtualization.vminterface" +``` + +## Object Model Notes + +### Manufacturer + +Create Manufacturer objects. Uses the following data structure: + +```yaml +manufacturer: + - name: # str + description: # str +``` + +### Platform + +Create Platform objects. Uses the following data structure: + +```yaml +platform: + - name: # str + manufacturer: # str + network_driver: # str + napalm_driver: # str + napalm_arguments: {} # dict + description: # str +``` + +Ensure Manufacturer objects are created before they are referenced. + +### LocationType + +Create LocationType objects. Uses the following data structure: + +```yaml +location_type: + - name: # str + parent: # str + nestable: # bool + description: # str + content_types: [] # List[str] +``` + +### Location + +Create Location objects. Uses the following data structure: + +```yaml +location: + - name: # str + location_type: # str + parent: # str + status: # str + facility: # str + asn: # int + time_zone: # str + description: # str + tenant: # str + physical_address: # str + shipping_address: # str + latitude: # str + longitude: # str + contact_name: # str + contact_phone: # str + contact_email: # str + tags: [] # List[str] +``` + +`location_type`, `parent`, `status`, `time_zone`, `tenant`, and `tags` are all references to objects. Ensure they exist prior to attempting to reference them here. + +Ensure that location types that you reference here are first defined in the location type models or they will fail to create. + +### TenantGroup + +Create TenantGroup objects. Uses the following data structure: + +```yaml +tenant_group: + - name: # str + parent: # str + description: # str +``` + +### Tenant + +Create Tenant objects.
+
+### TenantGroup
+
+Create TenantGroup objects. Uses the following data structure:
+
+```yaml
+tenant_group:
+  - name: # str
+    parent: # str
+    description: # str
+```
+
+### Tenant
+
+Create Tenant objects. Uses the following data structure:
+
+```yaml
+tenant:
+  - name: # str
+    tenant_group: # str
+    description: # str
+    tags: [] # List[str]
+```
+
+Ensure that any tenant groups you reference here are first defined under `tenant_group`, or the tenants will fail to create.
+
+### Role
+
+Create Role objects. Uses the following data structure:
+
+```yaml
+role:
+  - name: "Administrative" # str
+    weight: # int
+    description: "Unit plays an administrative role" # str
+    color: "2196f3" # str
+    content_types: # List[str]
+      - "extras.contactassociation"
+  - name: "Anycast"
+    weight:
+    description: ""
+    color: "ffc107"
+    content_types:
+      - "ipam.ipaddress"
+  - name: "Billing"
+    weight:
+    description: "Unit plays a billing role"
+    color: "4caf50"
+    content_types:
+      - "extras.contactassociation"
+  - name: "CARP"
+    weight:
+    description: ""
+    color: "4caf50"
+    content_types:
+      - "ipam.ipaddress"
+  - name: "GLBP"
+    weight:
+    description: ""
+    color: "4caf50"
+    content_types:
+      - "ipam.ipaddress"
+  - name: "HSRP"
+    weight:
+    description: ""
+    color: "4caf50"
+    content_types:
+      - "ipam.ipaddress"
+  - name: "Loopback"
+    weight:
+    description: ""
+    color: "9e9e9e"
+    content_types:
+      - "ipam.ipaddress"
+  - name: "On Site"
+    weight:
+    description: "Unit plays an on site role"
+    color: "111111"
+    content_types:
+      - "extras.contactassociation"
+  - name: "Secondary"
+    weight:
+    description: ""
+    color: "2196f3"
+    content_types:
+      - "ipam.ipaddress"
+  - name: "Support"
+    weight:
+    description: "Unit plays a support role"
+    color: "ffeb3b"
+    content_types:
+      - "extras.contactassociation"
+  - name: "VIP"
+    weight:
+    description: ""
+    color: "4caf50"
+    content_types:
+      - "ipam.ipaddress"
+  - name: "VRRP"
+    weight:
+    description: ""
+    color: "4caf50"
+    content_types:
+      - "ipam.ipaddress"
+```
+
+This also recreates the default Roles included in Nautobot core, because the Role model does not support custom fields and therefore cannot be selectively synced with the SSoT framework. Any roles not included in the Bootstrap `global_settings.yml` file will be deleted. The list above is the default set of roles included in Nautobot core.
+
+### Team
+
+Create Team objects. Uses the following data structure:
+
+```yaml
+team:
+  - name: # str
+    phone: # str
+    email: # str
+    address: # str
+    # contacts: []
+```
+
+Currently, assigning contacts to a team through the `contacts:` key is not supported due to the way that DiffSync works; in part this is because the contacts need to exist before they can be assigned to `team.contacts`. Instead, assign a Contact to a Team by adding the Team to the `teams` list in the `Contact` model.
+
+### Contact
+
+Create Contact objects. Uses the following data structure:
+
+```yaml
+contact:
+  - name: # str
+    phone: # str
+    email: # str
+    address: # str
+    teams: [] # List[str]
+```
+
+As noted above, a `Contact` can be assigned to a `Team` by listing the `Team` names in the `teams:` key in the `Contact` model.
+
+
+### Provider
+
+Create Provider objects. Uses the following data structure:
+
+```yaml
+provider:
+  - name: # str
+    asn: # int
+    account_number: # str
+    portal_url: # str
+    noc_contact: # str
+    admin_contact: # str
+    tags: [] # List[str]
+```
+
+### Provider Network
+
+Create ProviderNetwork objects. Uses the following data structure:
+
+```yaml
+provider_network:
+  - name: # str
+    provider: # str
+    description: # str
+    comments: # str
+    tags: [] # List[str]
+```
+
+`provider` is a reference to a Provider object. Ensure it exists before trying to assign it.
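+
+For example, a minimal, hypothetical provider and provider network pairing (all names and values below are placeholders):
+
+```yaml
+provider:
+  - name: "Example Carrier"
+    asn: 65000
+    account_number: "12345"
+    portal_url: "https://portal.example.com"
+    noc_contact: "noc@example.com"
+    admin_contact: "admin@example.com"
+    tags: []
+provider_network:
+  - name: "Example Carrier MPLS"
+    provider: "Example Carrier"
+    description: "MPLS backbone"
+    comments: ""
+    tags: []
+```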
+
+### CircuitType
+
+Create CircuitType objects. Uses the following data structure:
+
+```yaml
+circuit_type:
+  - name: # str
+    description: # str
+```
+
+### Circuit
+
+Create Circuit objects. Uses the following data structure:
+
+```yaml
+circuit:
+  - circuit_id: # str
+    provider: # str
+    circuit_type: # str
+    status: # str
+    date_installed: # date (YYYY-MM-DD)
+    commit_rate_kbps: # int
+    description: # str
+    tenant: # str
+    tags: [] # List[str]
+```
+
+`circuit_type`, `status`, `tenant`, and `tags` are references to existing objects in Nautobot. Make sure these exist before trying to assign them.
+
+### CircuitTermination
+
+Create CircuitTermination objects. Uses the following data structure:
+
+```yaml
+circuit_termination:
+  - name: # str
+    termination_type: # str
+    location: # str
+    provider_network: # str
+    port_speed_kbps: # int
+    upstream_speed_kbps: # int
+    cross_connect_id: # str
+    patch_panel_or_ports: # str
+    description: # str
+    tags: [] # List[str]
+```
+
+`termination_type` can be "Provider Network" or "Location", which are the only allowed relationships in the database for CircuitTermination objects. If you specify "Provider Network" as the `termination_type`, you must provide a valid Provider Network name in the `provider_network` field. If you specify "Location" as the `termination_type`, you must provide a valid Location name in the `location` field. The `name` field is a bit special, as it is used to reference the Circuit object: it should be formatted as `<circuit_id>__<provider>__<termination_side>`. The termination side can be "A" or "Z", and the Circuit ID and Provider name are used to look up the correct Circuit and Provider on creation, so make sure those exist prior to referencing them.
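+
+As a minimal, hypothetical illustration of the naming convention (placeholder values; the provider, circuit type, and location referenced are assumed to be defined elsewhere in the file or to already exist in Nautobot):
+
+```yaml
+circuit:
+  - circuit_id: "CKT-0001"
+    provider: "Example Carrier"
+    circuit_type: "MPLS"
+    status: "Active"
+    date_installed: "2024-01-01"
+    commit_rate_kbps: 100000
+    description: ""
+    tenant:
+    tags: []
+circuit_termination:
+  - name: "CKT-0001__Example Carrier__A"
+    termination_type: "Location"
+    location: "HQ"
+    provider_network:
+    port_speed_kbps: 100000
+    upstream_speed_kbps: 100000
+    cross_connect_id: ""
+    patch_panel_or_ports: ""
+    description: ""
+    tags: []
+```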
+
+### Namespace (IPAM)
+
+Create Namespace objects. Uses the following data structure:
+
+```yaml
+namespace:
+  - name: # str
+    description: # str
+    location: # str
+```
+
+`location` is a reference to a location name; the app will attempt to look up this location by name and associate it with the namespace. Make sure the location exists. All uniqueness constraints are enforced by the ORM.
+
+### RIR (IPAM)
+
+Create RIR objects. Uses the following data structure:
+
+```yaml
+rir:
+  - name: # str
+    private: # bool: defaults to false
+    description: # str
+```
+
+### VRF (IPAM)
+
+Create VRF objects. Uses the following data structure:
+
+```yaml
+vrf:
+  - name: # str
+    namespace: # str
+    route_distinguisher: # str
+    description: # str
+    # prefixes: # List[str]
+    tenant: # str
+    tags: # List[str]
+```
+
+`namespace` and `tenant` are strings which reference the namespace and tenant names respectively. Make sure these exist in Nautobot or are defined in `global_settings.yml` so they can be associated. `tenant` defaults to None if it is blank or the Nautobot Tenant can't be found. `namespace` defaults to the Global namespace if it is blank or can't be found. Currently, due to the order in which the app syncs objects, `prefixes` can't be defined on VRFs and must instead be assigned from the `prefix` object by specifying `vrfs` on the `prefix` definition. All uniqueness constraints are enforced by the ORM.
+
+### VLAN Group
+
+Create VLANGroup objects. Uses the following data structure:
+
+```yaml
+vlan_group:
+  - name: # str
+    location: # str
+    description: # str
+```
+
+`location` is a reference to a location name; the app will attempt to look up this location by name and associate it with the VLAN group. Make sure the location exists. All uniqueness constraints are enforced by the ORM.
+
+### VLAN
+
+Create VLAN objects. Uses the following data structure:
+
+```yaml
+vlan:
+  - name: # str
+    vid: # int between 1 and 4094
+    description: # str
+    status: # str
+    role: # str
+    locations: # List[str]
+    vlan_group: # str
+    tenant: # str
+    tags: # List[str]
+```
+
+`locations` and `tags` are lists of strings which reference the location and tag names respectively. Make sure these exist in Nautobot or are defined in `global_settings.yml` so they can be associated. `vlan_group` is a reference to a Nautobot VLANGroup name. This will be associated if it exists, or default to None if the Nautobot VLANGroup can't be found. `tenant`, `role`, and `status` are references to Tenant, Role, and Status objects in Nautobot. The app will attempt to look these up and associate them. `role` and `tenant` default to None if the object can't be found. `status` defaults to Active if an improper status is defined. All uniqueness constraints are enforced by the ORM.
+
+### Prefix
+
+Create Prefix objects. Uses the following data structure:
+
+```yaml
+prefix:
+  - network: # str (cidr notation)
+    namespace: # str
+    prefix_type: # str # network, container, or pool
+    status: # str
+    role: # str
+    rir: # str
+    date_allocated: # str(datetime) (YYYY-mm-dd HH:mm:ss)
+    description: # str
+    vrfs: # List[str]
+    locations: # List[str]
+    vlan: # str
+    tenant: # str
+    tags: # List[str]
+```
+
+`vrfs`, `locations`, and `tags` are lists of strings that reference the names of VRF, Location, and Tag objects in Nautobot. Make sure these exist, or they will default to None if they can't be found. `network` is the CIDR notation for the prefix. `namespace`, `status`, `role`, `rir`, `vlan`, and `tenant` are also references to the names of their respective objects in Nautobot; `status` defaults to Active, and the rest default to None if left blank or not found. `prefix_type` options are limited by the `PrefixTypeChoices` defined in `nautobot.ipam.choices`; they are all lowercase: `network`, `container`, or `pool`. `date_allocated` should be a datetime string in the format indicated above (Year-Month-Day Hours:Minutes:Seconds, with the time in 24-hour format) in order to properly set the `date_allocated` field on the prefix object, for example "1970-01-01 00:00:00". All uniqueness constraints are enforced by the ORM.
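+
+A minimal, hypothetical prefix entry showing the `date_allocated` format (all values below are placeholders):
+
+```yaml
+prefix:
+  - network: "10.0.0.0/16"
+    namespace: "Global"
+    prefix_type: "container"
+    status: "Active"
+    role:
+    rir:
+    date_allocated: "2024-01-01 00:00:00"
+    description: "Example container prefix"
+    vrfs: []
+    locations: []
+    vlan:
+    tenant:
+    tags: []
+```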
+
+### Secret
+
+Create Secret objects. Uses the following data structure:
+
+```yaml
+secret:
+  - name: # str
+    provider: "environment-variable" # or text-file
+    parameters: # dict
+      variable: # str
+      path: # str
+```
+
+`Secret` objects need to be created before the `SecretsGroup` objects that reference them, so make sure any `Secret` objects you want to reference in `SecretsGroup` objects are created here or already exist in Nautobot.
+
+### SecretsGroup
+
+Create SecretsGroup objects. Uses the following data structure:
+
+```yaml
+secrets_group:
+  - name: # str
+    secrets: # List[dict]
+      - name: # str
+        secret_type: # str
+        access_type: # str
+      - name: # str
+        secret_type: # str
+        access_type: # str
+```
+
+As noted above, `Secret` objects need to be created before a SecretsGroup references them, so make sure any `Secret` objects you want to reference in `SecretsGroup` objects are created here or already exist in Nautobot.
+
+### GitRepository
+
+Create GitRepository objects. Uses the following data structure:
+
+```yaml
+git_repository:
+  - name: # str
+    url: # str
+    branch: # str
+    secrets_group_name: # str
+    provided_data_type: [] # List[str]
+
+# develop/staging/production.yaml
+git_branch: # str
+```
+
+GitRepositories are a bit unique. If you specify the `branch:` key for a repository in the `global_settings.yml` file, it will override the `git_branch:` key from the environment-specific YAML file (e.g. `develop.yml`). The `git_branch:` key in the environment-specific YAML file is the default, so you don't have to specify a branch for each Git repository.
+
+### DynamicGroup
+
+Create DynamicGroup objects. Uses the following data structure:
+
+```yaml
+dynamic_group:
+  - name: # str
+    content_type: # str
+    description: # str
+    filter: | # str
+```
+
+The `filter:` key for DynamicGroup objects takes a string representation of the JSON filter that selects the required objects.
+
+### ComputedField
+
+Create ComputedField objects. Uses the following data structure:
+
+```yaml
+computed_field:
+  - label: # str
+    content_type: # str
+    template: # str
+```
+
+The `template:` key for ComputedField objects takes a Jinja2 template string which will render the computed information.
+
+### Tag
+
+Create Tag objects. Uses the following data structure:
+
+```yaml
+tag:
+  - name: # str
+    color: # str
+    description: # str
+    content_types: [] # List[str]
+```
+
+The `color` key is optional and defaults to grey if not specified. The `content_types` list is a list of `app_label.model` formatted strings for the types of objects that the tag should be able to apply to.
+
+### GraphQLQuery
+
+Create GraphQLQuery objects. Uses the following data structure:
+
+```yaml
+graph_ql_query:
+  - name: # str
+    query: | # str
+```
+
+The `query:` key takes a GraphQL-formatted query string that retrieves the required information.
+
+### Software
+
+- Note: Requires the Nautobot Device Lifecycle Management plugin to be installed.
+
+Create Software objects. Uses the following data structure:
+
+```yaml
+software:
+  - device_platform: # str
+    version: # str
+    alias: # str
+    release_date: # date (YYYY-MM-DD)
+    eos_date: # date (YYYY-MM-DD)
+    documentation_url: # str
+    lts: # bool
+    pre_release: # bool
+    tags: [] # List[str]
+```
+
+The `device_platform` key must be a Platform that exists in Nautobot or is created by this plugin. The date fields `release_date` and `eos_date` need to be formatted YYYY-MM-DD in order to import properly.
+
+### SoftwareImage
+
+- Note: Requires the Nautobot Device Lifecycle Management plugin to be installed.
+
+Create SoftwareImage objects. Uses the following data structure:
+
+```yaml
+software_image:
+  - software: # str
+    platform: # str
+    software_version: # str
+    file_name: # str
+    download_url: # str
+    image_file_checksum: # str
+    hashing_algorithm: # str
+    default_image: # bool
+    tags: [] # List[str]
+```
+
+The `software`, `platform`, and `software_version` keys are linked and should be consistent. The Platform and Software must already be present in Nautobot for these models to be created. The format of the `software:` key is important and should be `<platform> - <version>` (for example, `arista_eos - 15.4.3`).
+
+### ValidatedSoftware
+
+- Note: Requires the Nautobot Device Lifecycle Management plugin to be installed.
+
+Create ValidatedSoftware objects. Uses the following data structure:
+
+```yaml
+validated_software:
+  - software: # str
+    valid_since: # date (YYYY-MM-DD)
+    valid_until: # date (YYYY-MM-DD)
+    preferred_version: # bool
+    devices: [] # List[str]
+    device_types: [] # List[str]
+    device_roles: [] # List[str]
+    inventory_items: [] # List[str]
+    object_tags: [] # List[str]
+    tags: [] # List[str]
+```
+
+The `software:` key is a reference to the platform and software version of a Software object that already exists in Nautobot (or is created by this plugin). The `valid_since` and `valid_until` fields must be dates in YYYY-MM-DD format. The `devices`, `device_types`, `device_roles`, `inventory_items`, and `object_tags` keys are all lists of objects to apply the validated software to, for validation against what is currently running on the device.
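+
+A minimal, hypothetical illustration (this assumes a Software entry for platform `arista_eos`, version `4.25.10M` has been defined as in the sample at the top of this document, and the device type name is a placeholder; adjust the software reference to match your Platform naming):
+
+```yaml
+validated_software:
+  - software: "arista_eos - 4.25.10M"
+    valid_since: 2024-01-01
+    valid_until:
+    preferred_version: true
+    devices: []
+    device_types: ["DCS-7050QX-32"]
+    device_roles: []
+    inventory_items: []
+    object_tags: []
+    tags: []
+```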
diff --git a/docs/user/integrations/device42.md b/docs/user/integrations/device42.md
index 00f027a2e..2e449a05b 100644
--- a/docs/user/integrations/device42.md
+++ b/docs/user/integrations/device42.md
@@ -6,7 +6,7 @@ From Device42 into Nautobot, it synchronizes the following objects:

| Device42 | Nautobot |
| ----------------------- | ---------------------------- |
-| Buildings | Sites |
+| Buildings | Locations |
| Rooms | RackGroups |
| Racks | Racks |
| Vendors | Manufacturers |
@@ -36,7 +36,9 @@ To start the synchronization, simply select the ExternalIntegration that corresp

![Job Form](../../images/device42_job-form.png)

-If you wish to just test the synchronization but not have any data created in Nautobot you'll want to select the `Dry run` toggle. Clicking the `Debug` toggle will enable more verbose logging to inform you of what is occuring behind the scenes. Finally, the `Bulk import` option will enable bulk create and update operations to be used when the synchronization is complete. This can improve performance times for the App by forsaking validation of the imported data. Be aware that this could potentially cause bad data to be pushed into Nautobot.
+> As of SSoT 3.2.0 you have the option to define the LocationType to use for imported Buildings. If unspecified in the Job form, it will default to the Site LocationType as it did previously.
+
+If you wish to test the synchronization without having any data created in Nautobot, select the `Dryrun` toggle. Clicking the `Debug` toggle will enable more verbose logging to inform you of what is occurring behind the scenes. Finally, the `Bulk import` option will enable bulk create and update operations to be used when the synchronization is complete. This can improve performance times for the App by forgoing validation of the imported data. Be aware that this could potentially cause bad data to be pushed into Nautobot.

Running this Job will redirect you to a `Nautobot Job Result` view.
diff --git a/docs/user/integrations/dna_center.md b/docs/user/integrations/dna_center.md
new file mode 100644
index 000000000..07606b2bd
--- /dev/null
+++ b/docs/user/integrations/dna_center.md
@@ -0,0 +1,85 @@
+# Cisco DNA Center SSoT Integration
+
+The Cisco DNA Center SSoT integration is built as part of the [Nautobot Single Source of Truth (SSoT)](https://github.com/nautobot/nautobot-app-ssot) app. The SSoT app enables Nautobot to be the aggregation point for data coming from multiple systems of record (SoR).
+
+From Cisco DNA Center into Nautobot, it synchronizes the following objects:
+
+| DNA Center | Nautobot |
+| ----------------------- | ---------------------------- |
+| Areas | Location* |
+| Buildings | Location* |
+| Floors | Location* |
+| Devices | Devices** |
+| Ports | Interfaces |
+| Prefixes | Prefixes |
+| IP Addresses | IP Addresses |
+
+`*` As of SSoT 3.2.0 the LocationType for Areas, Buildings, and Floors can be defined in the Job form. Prior to SSoT 3.2.0 the DNA Center integration creates Region, Site, and Floor LocationTypes and imports Areas as Region Locations, Buildings as Site Locations, and Floors as Floor Locations.
+ +`**` If the [Device Lifecycle Nautobot app](https://github.com/nautobot/nautobot-app-device-lifecycle-mgmt) is found to be installed, a matching Version will be created with a RelationshipAssociation connecting the Device and that Version. + +## Usage + +Once the app is installed and configured, you will be able to perform an inventory ingestion from DNA Center into Nautobot. From the Nautobot SSoT Dashboard view (`/plugins/ssot/`), DNA Center will show as a Data Source. + +![Dashboard View](../../images/dnac_dashboard.png) + +From the Dashboard, you can also view more information about the App by clicking on the `DNA Center to Nautobot` link and see the Detail view. This view will show the mappings of DNA Center objects to Nautobot objects, the sync history, and other configuration details for the App: + +![Detail View](../../images/dnac_detail-view.png) + +In order to utilize this integration you must first enable the Job. You can find the available installed Jobs under Jobs -> Jobs: + +![Job List](../../images/dnac_job_list.png) + +To enable the Job you must click on the orange pencil icon to the right of the `DNA Center to Nautobot` Job. You will be presented with the settings for the Job as shown below: + +![Job Settings](../../images/dnac_job_settings.png) + +You'll need to check the `Enabled` checkbox and then the `Update` button at the bottom of the page. You will then see that the play button next to the Job changes to blue and becomes functional, linking to the Job form. + +![Enabled Job](../../images/dnac_enabled_job.png) + +Once the Job is enabled, you'll need to manually create a few objects in Nautobot to use with the Job. First, you'll need to create a Secret that contains the username and password for authenticating to your desired DNA Center instance: + +![Username Secret](../../images/dnac_username_secret.png) + +![Password Secret](../../images/dnac_password_secret.png) + +Once the required Secrets are created, you'll need to create a SecretsGroup that pairs them together and defines the Access Type of HTTP(S) like shown below: + +![DNAC SecretsGroup](../../images/dnac_secretsgroup.png) + +With the SecretsGroup defined containing your instance credentials you'll then need to create an ExternalIntegration object to store the information about the DNA Center instance you wish to synchronize with. + +![DNAC ExternalIntegration](../../images/dnac_external_integration.png) + +> The only required portions are the Name, Remote URL, Verify SSL, and Secrets Group. The `Extra Config` section allows you to specify the port that DNA Center is running on. It will default to 443 if unspecified. + +![DNAC ExternalIntegration](../../images/dnac_external_integration_adv.png) + +The final step before running the Job is to create a Controller that references the ExternalIntegration that you just created. You can attach a `Managed Device Group` to the Controller for all imported Devices to be placed in. If you don't create a Managed Device Group, one will be created automatically and associated to the specified Controller with the name of ` Managed Devices`. + +![DNAC Controller](../../images/dnac_controller.png) + +> You can utilize multiple DNA Center Controllers with this integration as long as you specify a unique Tenant per Controller. The failure to use differing Tenants will have the Devices, Prefixes, and IPAddresses potentially removed if they are non-existent on the additional Controller. Locations should remain unaffected. 
+
+With those configured, you will then need to define a LocationType to use for each DNA Center location type: Areas, Buildings, and Floors. With those created, you can run the Job to start the synchronization:
+
+> When creating the Area LocationType you must check the "Nestable" option.
+
+![Job Form](../../images/dnac_job_form.png)
+
+If you wish to test the synchronization without having any data created in Nautobot, select the `Dryrun` toggle. Clicking the `Debug` toggle will enable more verbose logging to inform you of what is occurring behind the scenes. Finally, the `Bulk import` option will enable bulk create and update operations to be used when the synchronization is complete. This can improve performance times for the integration by forgoing validation of the imported data. Be aware that this could potentially cause bad data to be pushed into Nautobot. After those toggles, there are also dropdowns that allow you to specify the DNA Center Controller to synchronize with and to define the LocationTypes to use for the Areas, Buildings, and Floors imported from DNA Center. In addition, there are also some optional settings on the Job form:
+
+- The Location Mapping field allows you to define a dictionary of Location mappings. This feature is intended for specifying parent Locations for the Area and Building locations in DNA Center. This is useful if this information is missing from DNA Center but required for Nautobot, or to allow you to change the information as it's imported to match information from another System of Record. The expected pattern for this field is `{"<location name>": {"parent": "<parent location name>"}}`.
+
+- Finally, there is an option to specify a Tenant to be assigned to the imported Devices, Prefixes, and IPAddresses. This is handy for cases where you have multiple DNA Center instances that are used by differing business units.
+
+Running this Job will redirect you to a `Nautobot Job Result` view.
+
+![JobResult View](../../images/dnac_jobresult.png)
+
+Once the Job has finished you can click on the `SSoT Sync Details` button at the top right of the Job Result page to see detailed information about the data that was synchronized from DNA Center and the outcome of the sync Job.
+
+![SSoT Sync Details](../../images/dnac_ssot-sync-details.png)
diff --git a/docs/user/integrations/index.md b/docs/user/integrations/index.md
index bb5b03b2e..961403d24 100644
--- a/docs/user/integrations/index.md
+++ b/docs/user/integrations/index.md
@@ -4,8 +4,11 @@ This Nautobot app supports the following integrations:

- [Cisco ACI](./aci.md)
- [Arista CloudVision](./aristacv.md)
+- [Bootstrap](./bootstrap.md)
- [Device42](./device42.md)
+- [Cisco DNA Center](./dna_center.md)
- [Infoblox](./infoblox.md)
- [IPFabric](./ipfabric.md)
- [Itential](./itential.md)
+- [Cisco Meraki](./meraki.md)
- [ServiceNow](./servicenow.md)
diff --git a/docs/user/integrations/meraki.md b/docs/user/integrations/meraki.md
new file mode 100644
index 000000000..de364ef42
--- /dev/null
+++ b/docs/user/integrations/meraki.md
@@ -0,0 +1,87 @@
+# Cisco Meraki SSoT Integration
+
+The Cisco Meraki SSoT integration is built as part of the [Nautobot Single Source of Truth (SSoT)](https://github.com/nautobot/nautobot-app-ssot) app. The SSoT app enables Nautobot to be the aggregation point for data coming from multiple systems of record (SoR).
+ +From Cisco Meraki into Nautobot, it synchronizes the following objects: + +| Meraki | Nautobot | +| ----------------------- | ---------------------------- | +| Networks | Location* | +| Devices | Devices | +| Hardwares | DeviceTypes | +| OSVersions | SoftwareVersions | +| Ports | Interfaces | +| Prefixes | Prefixes | +| IP Addresses | IP Addresses | + +`*` As of SSoT 3.2.0 the LocationType for Networks can be defined in the Job form. + +## Usage + +Once the app is installed and configured, you will be able to perform an inventory ingestion from Meraki into Nautobot. From the Nautobot SSoT Dashboard view (`/plugins/ssot/`), Meraki will show as a Data Source. + +![Dashboard View](../../images/meraki_dashboard.png) + +From the Dashboard, you can also view more information about the App by clicking on the `Meraki to Nautobot` link and see the Detail view. This view will show the mappings of Meraki objects to Nautobot objects, the sync history, and other configuration details for the App: + +![Detail View](../../images/meraki_detail-view.png) + +In order to utilize this integration you must first enable the Job. You can find the available installed Jobs under Jobs -> Jobs: + +![Job List](../../images/meraki_job_list.png) + +To enable the Job you must click on the orange pencil icon to the right of the `Meraki to Nautobot` Job. You will be presented with the settings for the Job as shown below: + +![Job Settings](../../images/meraki_job_settings.png) + +You'll need to check the `Enabled` checkbox and then the `Update` button at the bottom of the page. You will then see that the play button next to the Job changes to blue and becomes functional, linking to the Job form. + +![Enabled Job](../../images/meraki_enabled_job.png) + +Once the Job is enabled, you'll need to manually create a few objects in Nautobot to use with the Job. First, you'll need to create a Secret that contains your organization ID and token for authenticating to your desired Meraki instance: + +![Org ID Secret](../../images/meraki_org_id_secret.png) + +![Token Secret](../../images/meraki_token_secret.png) + +Once the required Secrets are created, you'll need to create a SecretsGroup that pairs them together and defines the Access Type of HTTP(S) like shown below: + +![DNAC SecretsGroup](../../images/meraki_secretsgroup.png) + +With the SecretsGroup defined containing your instance credentials you'll then need to create an ExternalIntegration object to store the information about the Meraki instance you wish to synchronize with. + +![DNAC ExternalIntegration](../../images/meraki_external_integration.png) + +> The only required portions are the Name, Remote URL, Verify SSL, and Secrets Group. + +The final step before running the Job is to create a Controller that references the ExternalIntegration that you just created. You can attach a `Managed Device Group` to the Controller for all imported Devices to be placed in. If you don't create a Managed Device Group, one will be created automatically and associated to the specified Controller with the name of ` Managed Devices`. + +![DNAC Controller](../../images/meraki_controller.png) + +> You can utilize multiple Meraki Controllers with this integration as long as you specify a unique Tenant per Controller. The failure to use differing Tenants will have the Devices, Prefixes, and IPAddresses potentially removed if they are non-existent on the additional Controller. Locations should remain unaffected. 
+ +With those configured, you will then need to define a LocationType to use for the imported Networks. With those created, you can run the Job to start the synchronization: + +![Job Form](../../images/meraki_job_form.png) + +If you wish to just test the synchronization but not have any data created in Nautobot you'll want to select the `Dryrun` toggle. Clicking the `Debug` toggle will enable more verbose logging to inform you of what is occuring behind the scenes. After those toggles there are also dropdowns that allow you to specify the Meraki Controller to synchronize with and to define the LocationType to use for the imported Networks from Meraki. In addition, there are also some optional settings on the Job form: + +- Should the LocationType that you specify for the imported Networks require a parent Location to be assigned, you can define this parent one of two ways: + +1. The Parent Location field allows you to define a singular Location that will be assigned as the parent for all imported Network Locations. + +2. The Location Mapping field allows you to define a dictionary of Location mappings. This feature is intended for specifying parent Locations for the Network Locations in Meraki. This is useful if this information is missing from Meraki but required for Nautobot or to allow you to change the information as it's imported to match information from another System of Record. The expected pattern for this field is `{"": {"parent": ""}}`. + +In addition, there are two methods provided to assign Roles to your imported Devices: + +1. The Hostname Mapping field allows you to specify list of tuples containing a regular expression pattern to match against Device hostnames and the Role to assign if matched. Ex: [(".*FW.*", "Firewall")] + +2. The DeviceType Mapping field allows you to specify a list of tuples containing a string to match against the Device model and the Role to assign when matched. The string to match can simply be the series letters, ie MX for firewalls, or explicit models like MX84. Ex: [("MS", "Switch")] + +- Finally there is an option to specify a Tenant to be assigned to the imported Devices, Prefixes, and IPAddreses. This is handy for cases where you have multiple Meraki instances that are used by differing business units. + +Running this Job will redirect you to a `Nautobot Job Result` view. + +![JobResult View](../../images/meraki_jobresult.png) + +Once the Job has finished you can click on the `SSoT Sync Details` button at the top right of the Job Result page to see detailed information about the data that was synchronized from Meraki and the outcome of the sync Job. diff --git a/docs/user/performance.md b/docs/user/performance.md index 60b6d3198..fe2356693 100644 --- a/docs/user/performance.md +++ b/docs/user/performance.md @@ -17,7 +17,7 @@ In brief, the following general steps can be followed: 2. Define a `DiffSync` adapter class for loading initial data from Nautobot and constructing instances of each `DiffSyncModel` class to represent that data. 3. Define a `DiffSync` adapter class for loading initial data from the Data Source or Data Target system and constructing instances of the `DiffSyncModel` classes to represent that data. -4. Develop a Job class, derived from either the `DataSource` or `DataTarget` classes provided by this app, and implement the adapters to populate the `self.source_adapter` and `self.target_adapter` that are used by the built-in implementation of `sync_data`. 
This `sync_data` method is an opinionated way of running the process including some performance data, more in [next section](#analyze-job-performance), but you could overwrite it completely or any of the key hooks that it calls: +4. Develop a Job class, derived from either the `DataSource` or `DataTarget` classes provided by this app, and implement the adapters to populate the `self.source_adapter` and `self.target_adapter` that are used by the built-in implementation of `sync_data`. This `sync_data` method is an opinionated way of running the process including some performance data, more in [next section](#optimizing-for-execution-time), but you could overwrite it completely or any of the key hooks that it calls: - `self.load_source_adapter`: This is mandatory to be implemented. As an example: diff --git a/invoke.example.yml b/invoke.example.yml index a71bc4ffa..7102a2dad 100644 --- a/invoke.example.yml +++ b/invoke.example.yml @@ -1,6 +1,6 @@ --- nautobot_ssot: - nautobot_ver: "2.0.0" + nautobot_ver: "2.1.0" python_ver: "3.11" # local: false # compose_dir: "/full/path/to/nautobot-app-ssot/development" diff --git a/mkdocs.yml b/mkdocs.yml index a83afa16d..2bac83284 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -110,10 +110,13 @@ nav: - "user/integrations/index.md" - Cisco ACI: "user/integrations/aci.md" - Arista CloudVision: "user/integrations/aristacv.md" + - Bootstrap: "user/integrations/bootstrap.md" - Device42: "user/integrations/device42.md" + - DNA Center: "user/integrations/dna_center.md" - Infoblox: "user/integrations/infoblox.md" - IPFabric: "user/integrations/ipfabric.md" - Itential: "user/integrations/itential.md" + - Cisco Meraki: "user/integrations/meraki.md" - ServiceNow: "user/integrations/servicenow.md" - Modeling: "user/modeling.md" - Performance: "user/performance.md" @@ -125,16 +128,20 @@ nav: - "admin/integrations/index.md" - Cisco ACI: "admin/integrations/aci_setup.md" - Arista CloudVision: "admin/integrations/aristacv_setup.md" + - Bootstrap: "admin/integrations/bootstrap_setup.md" - Device42: "admin/integrations/device42_setup.md" + - DNA Center: "admin/integrations/dna_center_setup.md" - Infoblox: "admin/integrations/infoblox_setup.md" - IPFabric: "admin/integrations/ipfabric_setup.md" - Itential: "admin/integrations/itential_setup.md" + - Cisco Meraki: "admin/integrations/meraki_setup.md" - ServiceNow: "admin/integrations/servicenow_setup.md" - Upgrade: "admin/upgrade.md" - Uninstall: "admin/uninstall.md" - Compatibility Matrix: "admin/compatibility_matrix.md" - Release Notes: - "admin/release_notes/index.md" + - v3.2: "admin/release_notes/version_3.2.md" - v3.1: "admin/release_notes/version_3.1.md" - v3.0: "admin/release_notes/version_3.0.md" - v2.8: "admin/release_notes/version_2.8.md" diff --git a/nautobot_ssot/__init__.py b/nautobot_ssot/__init__.py index bb805c222..b769fbee3 100644 --- a/nautobot_ssot/__init__.py +++ b/nautobot_ssot/__init__.py @@ -17,10 +17,13 @@ _CONFLICTING_APP_NAMES = [ "nautobot_ssot_aci", "nautobot_ssot_aristacv", + "nautobot_ssot_bootstrap", "nautobot_ssot_device42", "nautobot_ssot_dna_center", "nautobot_ssot_infoblox", "nautobot_ssot_ipfabric", + "nautobot_ssot_itential", + "nautobot_ssot_meraki", "nautobot_ssot_servicenow", ] @@ -115,6 +118,7 @@ class NautobotSSOTAppConfig(NautobotAppConfig): } caching_config = {} config_view_name = "plugins:nautobot_ssot:config" + docs_view_name = "plugins:nautobot_ssot:docs" def ready(self): """Trigger callback when database is ready.""" diff --git a/nautobot_ssot/exceptions.py 
b/nautobot_ssot/exceptions.py new file mode 100644 index 000000000..105ad16b1 --- /dev/null +++ b/nautobot_ssot/exceptions.py @@ -0,0 +1,67 @@ +"""Custom Exceptions to be used with SSoT integrations.""" + + +class AdapterLoadException(Exception): + """Raised when there's an error while loading data.""" + + +class AuthFailure(Exception): + """Exception raised when authenticating to endpoint fails.""" + + def __init__(self, error_code, message): + """Populate exception information.""" + self.expression = error_code + self.message = message + super().__init__(self.message) + + +class ConfigurationError(Exception): + """Exception thrown when Job configuration is wrong.""" + + +class JobException(Exception): + """Exception raised when failure loading integration Job.""" + + def __init__(self, message): + """Populate exception information.""" + self.message = message + super().__init__(self.message) + + +class InvalidUrlScheme(Exception): + """Exception raised for wrong scheme being passed for URL. + + Attributes: + message (str): Returned explanation of Error. + """ + + def __init__(self, scheme): + """Initialize Exception with wrong scheme in message.""" + self.message = f"Invalid URL scheme '{scheme}' found!" + super().__init__(self.message) + + +class MissingConfigSetting(Exception): + """Exception raised for missing configuration settings. + + Attributes: + message (str): Returned explanation of Error. + """ + + def __init__(self, setting): + """Initialize Exception with Setting that is missing and message.""" + self.setting = setting + self.message = f"Missing configuration setting - {setting}!" + super().__init__(self.message) + + +class MissingSecretsGroupException(Exception): + """Custom Exception in case SecretsGroup is not found on ExternalIntegration.""" + + +class RequestConnectError(Exception): + """Exception class to be raised upon requests module connection errors.""" + + +class RequestHTTPError(Exception): + """Exception class to be raised upon requests module HTTP errors.""" diff --git a/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py b/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py index 9ff461070..6f490054d 100644 --- a/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py +++ b/nautobot_ssot/integrations/aci/diffsync/adapters/aci.py @@ -435,8 +435,8 @@ def load_devices(self): site=self.site, site_tag=self.site, controller_group=( - self.job.apic.controller_managed_device_group.name - if self.job.apic.controller_managed_device_group + self.job.apic.controller_managed_device_groups.first().name + if self.job.apic.controller_managed_device_groups.count() != 0 else "" ), ) diff --git a/nautobot_ssot/integrations/aci/diffsync/client.py b/nautobot_ssot/integrations/aci/diffsync/client.py index c51b73440..9eb220428 100644 --- a/nautobot_ssot/integrations/aci/diffsync/client.py +++ b/nautobot_ssot/integrations/aci/diffsync/client.py @@ -11,6 +11,8 @@ import requests import urllib3 +from nautobot_ssot.exceptions import RequestConnectError, RequestHTTPError + from .utils import ( ap_from_dn, bd_from_dn, @@ -26,14 +28,6 @@ logger = logging.getLogger(__name__) -class RequestConnectError(Exception): - """Exception class to be raised upon requests module connection errors.""" - - -class RequestHTTPError(Exception): - """Exception class to be raised upon requests module HTTP errors.""" - - class AciApi: """Representation and methods for interacting with aci.""" diff --git a/nautobot_ssot/integrations/aci/diffsync/models/nautobot.py 
b/nautobot_ssot/integrations/aci/diffsync/models/nautobot.py index 479678f8e..737ea1024 100644 --- a/nautobot_ssot/integrations/aci/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/aci/diffsync/models/nautobot.py @@ -449,7 +449,7 @@ def create(cls, adapter, ids, attrs): try: vrf_tenant = OrmTenant.objects.get(name=attrs["vrf_tenant"]) except OrmTenant.DoesNotExist: - adapter.job.logger.warning(f"Tenant {attrs['vrf_tenant']} not found for VRF {attrs['vrf']}") + adapter.job.logger.warning(f"Tenant {attrs['vrf_tenant']} not found for VRF {ids['vrf']}") vrf_tenant = None return None diff --git a/nautobot_ssot/integrations/aci/jobs.py b/nautobot_ssot/integrations/aci/jobs.py index 046cd647b..c851ea4f1 100644 --- a/nautobot_ssot/integrations/aci/jobs.py +++ b/nautobot_ssot/integrations/aci/jobs.py @@ -5,6 +5,7 @@ from nautobot.dcim.models import Controller, Location from nautobot.extras.jobs import BooleanVar, Job, ObjectVar +from nautobot_ssot.exceptions import ConfigurationError from nautobot_ssot.integrations.aci.diffsync.adapters.aci import AciAdapter from nautobot_ssot.integrations.aci.diffsync.adapters.nautobot import NautobotAdapter from nautobot_ssot.integrations.aci.diffsync.client import AciApi @@ -14,10 +15,6 @@ name = "Cisco ACI SSoT" # pylint: disable=invalid-name, abstract-method -class ConfigurationError(Exception): - """Exception thrown when Job configuration is wrong.""" - - class AciDataSource(DataSource, Job): # pylint: disable=abstract-method, too-many-instance-attributes """ACI SSoT Data Source.""" diff --git a/nautobot_ssot/integrations/aristacv/jobs.py b/nautobot_ssot/integrations/aristacv/jobs.py index 735318059..6162dbecc 100644 --- a/nautobot_ssot/integrations/aristacv/jobs.py +++ b/nautobot_ssot/integrations/aristacv/jobs.py @@ -7,6 +7,7 @@ from nautobot.dcim.models import DeviceType from nautobot.extras.jobs import BooleanVar, Job +from nautobot_ssot.exceptions import MissingConfigSetting from nautobot_ssot.integrations.aristacv.diffsync.adapters.cloudvision import CloudvisionAdapter from nautobot_ssot.integrations.aristacv.diffsync.adapters.nautobot import NautobotAdapter from nautobot_ssot.integrations.aristacv.utils.cloudvision import CloudvisionApi @@ -16,20 +17,6 @@ name = "SSoT - Arista CloudVision" # pylint: disable=invalid-name -class MissingConfigSetting(Exception): - """Exception raised for missing configuration settings. - - Attributes: - message (str): Returned explanation of Error. - """ - - def __init__(self, setting): - """Initialize Exception with Setting that is missing and message.""" - self.setting = setting - self.message = f"Missing configuration setting - {setting}!" 
- super().__init__(self.message) - - class CloudVisionDataSource(DataSource, Job): # pylint: disable=abstract-method """CloudVision SSoT Data Source.""" diff --git a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py index 0477a7a85..49eaebd34 100644 --- a/nautobot_ssot/integrations/aristacv/utils/cloudvision.py +++ b/nautobot_ssot/integrations/aristacv/utils/cloudvision.py @@ -22,6 +22,7 @@ from cvprac.cvp_client import CvpClient, CvpLoginError from google.protobuf.wrappers_pb2 import StringValue # pylint: disable=no-name-in-module +from nautobot_ssot.exceptions import AuthFailure from nautobot_ssot.integrations.aristacv.constants import PORT_TYPE_MAP from nautobot_ssot.integrations.aristacv.types import CloudVisionAppConfig @@ -31,16 +32,6 @@ UPDATES_TYPE = List[UPDATE_TYPE] -class AuthFailure(Exception): - """Exception raised when authenticating to on-prem CVP fails.""" - - def __init__(self, error_code, message): - """Populate exception information.""" - self.expression = error_code - self.message = message - super().__init__(self.message) - - class CloudvisionApi: # pylint: disable=too-many-instance-attributes, too-many-arguments """Arista CloudVision gRPC client.""" @@ -65,7 +56,7 @@ def __init__(self, config: CloudVisionAppConfig): call_creds = grpc.access_token_call_credentials(token) elif config.cvp_user != "" and config.cvp_password != "": response = requests.post( - f"{parsed_url.hostname}:{parsed_url.port}/cvpservice/login/authenticate.do", + f"{parsed_url.scheme}://{parsed_url.hostname}:{parsed_url.port}/cvpservice/login/authenticate.do", auth=(config.cvp_user, config.cvp_password), timeout=60, verify=config.verify_ssl, diff --git a/nautobot_ssot/integrations/bootstrap/__init__.py b/nautobot_ssot/integrations/bootstrap/__init__.py new file mode 100644 index 000000000..85e31f0b4 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/__init__.py @@ -0,0 +1 @@ +"""Plugin declaration for bootstrap.""" diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/__init__.py b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/__init__.py new file mode 100644 index 000000000..77e2b2577 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/__init__.py @@ -0,0 +1 @@ +"""Adapter classes for loading DiffSyncModels with data from bootstrap or Nautobot.""" diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py new file mode 100755 index 000000000..76a09a66c --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/bootstrap.py @@ -0,0 +1,1073 @@ +"""Nautobot Ssot Bootstrap Adapter for bootstrap SSoT plugin.""" + +import datetime +import json +import os + +import yaml +from diffsync import Adapter +from diffsync.exceptions import ObjectAlreadyExists, ObjectNotFound +from django.conf import settings +from nautobot.extras.datasources.git import ensure_git_repository +from nautobot.extras.models import GitRepository + +from nautobot_ssot.integrations.bootstrap.diffsync.models.bootstrap import ( + BootstrapCircuit, + BootstrapCircuitTermination, + BootstrapCircuitType, + BootstrapComputedField, + BootstrapContact, + BootstrapDynamicGroup, + BootstrapGitRepository, + BootstrapGraphQLQuery, + BootstrapLocation, + BootstrapLocationType, + BootstrapManufacturer, + BootstrapNamespace, + BootstrapPlatform, + BootstrapPrefix, + BootstrapProvider, + BootstrapProviderNetwork, + BootstrapRiR, + 
BootstrapRole, + BootstrapSecret, + BootstrapSecretsGroup, + BootstrapTag, + BootstrapTeam, + BootstrapTenant, + BootstrapTenantGroup, + BootstrapVLAN, + BootstrapVLANGroup, + BootstrapVRF, +) +from nautobot_ssot.integrations.bootstrap.utils import ( + is_running_tests, + lookup_content_type, +) + +try: + import nautobot_device_lifecycle_mgmt # noqa: F401 + + LIFECYCLE_MGMT = True +except ImportError: + LIFECYCLE_MGMT = False + +if LIFECYCLE_MGMT: + from nautobot_ssot.integrations.bootstrap.diffsync.models.bootstrap import ( # noqa: F401 + BootstrapSoftware, + BootstrapSoftwareImage, + BootstrapValidatedSoftware, + ) + + +class LabelMixin: + """Add labels onto Nautobot objects to provide information on sync status with Bootstrap.""" + + def label_imported_objects(self, target): + """Add CustomFields to all objects that were successfully synced to the target.""" + _model_list = [ + "tag", + "tenant_group", + "tenant", + "role", + "manufacturer", + "platform", + "location_type", + "location", + "team", + "contact", + "provider", + "provider_network", + "circuit_type", + "circuit", + "namespace", + "rir", + "vlan_group", + "vlan", + "vrf", + "prefix", + "secret", + "secrets_group", + "git_repository", + "dynamic_group", + "computed_field", + "graph_ql_query", + ] + + if LIFECYCLE_MGMT: + _model_list.append( + "software", + "software_image", + "validated_software", + ) + + for modelname in _model_list: + for local_instance in self.get_all(modelname): + unique_id = local_instance.get_unique_id() + # Verify that the object now has a counterpart in the target DiffSync + try: + target.get(modelname, unique_id) + except ObjectNotFound: + continue + + self.label_object(modelname, unique_id) + + def label_object(self, modelname, unique_id): + """Apply the given CustomField to the identified object.""" + + def _label_object(nautobot_object): + """Apply custom field to object, if applicable.""" + nautobot_object.custom_field_data["last_synced_from_sor"] = today + nautobot_object.custom_field_data["system_of_record"] = os.getenv("SYSTEM_OF_RECORD", "Bootstrap") + nautobot_object.validated_save() + + today = datetime.today().date().isoformat() + + +class BootstrapAdapter(Adapter, LabelMixin): + """DiffSync adapter for Bootstrap.""" + + tenant_group = BootstrapTenantGroup + tenant = BootstrapTenant + role = BootstrapRole + manufacturer = BootstrapManufacturer + platform = BootstrapPlatform + location_type = BootstrapLocationType + location = BootstrapLocation + team = BootstrapTeam + contact = BootstrapContact + provider = BootstrapProvider + provider_network = BootstrapProviderNetwork + circuit_type = BootstrapCircuitType + circuit = BootstrapCircuit + circuit_termination = BootstrapCircuitTermination + namespace = BootstrapNamespace + rir = BootstrapRiR + vlan_group = BootstrapVLANGroup + vlan = BootstrapVLAN + vrf = BootstrapVRF + prefix = BootstrapPrefix + secret = BootstrapSecret + secrets_group = BootstrapSecretsGroup + git_repository = BootstrapGitRepository + dynamic_group = BootstrapDynamicGroup + computed_field = BootstrapComputedField + tag = BootstrapTag + graph_ql_query = BootstrapGraphQLQuery + + if LIFECYCLE_MGMT: + software = BootstrapSoftware + software_image = BootstrapSoftwareImage + validated_software = BootstrapValidatedSoftware + + top_level = [ + "tenant_group", + "tenant", + "role", + "manufacturer", + "platform", + "location_type", + "location", + "team", + "contact", + "provider", + "provider_network", + "circuit_type", + "circuit", + "namespace", + "rir", + "vlan_group", + 
"vlan", + "vrf", + "prefix", + "secret", + "secrets_group", + "git_repository", + "dynamic_group", + "computed_field", + "tag", + "graph_ql_query", + ] + + if LIFECYCLE_MGMT: + top_level.append("software") + top_level.append("software_image") + top_level.append("validated_software") + + def __init__(self, *args, job=None, sync=None, client=None, **kwargs): # noqa: D417 + """Initialize bootstrap. + + Args: + job (object, optional): bootstrap job. Defaults to None. + sync (object, optional): bootstrap DiffSync. Defaults to None. + client (object): bootstrap API client connection object. + """ + super().__init__(*args, **kwargs) + self.job = job + self.sync = sync + self.conn = client + + def load_tenant_group(self, bs_tenant_group, branch_vars): + """Load TenantGroup objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap TenantGroup: {bs_tenant_group}") + + try: + self.get(self.tenant_group, bs_tenant_group["name"]) + except ObjectNotFound: + new_tenant_group = self.tenant_group( + name=bs_tenant_group["name"], + parent=bs_tenant_group["parent"] if not None else None, + description=bs_tenant_group["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_tenant_group) + + def load_tenant(self, bs_tenant, branch_vars): + """Load Tenant objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Tenant: {bs_tenant}") + + try: + self.get(self.tenant, bs_tenant["name"]) + except ObjectNotFound: + new_tenant = self.tenant( + name=bs_tenant["name"], + tenant_group=bs_tenant["tenant_group"] if not None else None, + description=bs_tenant["description"], + tags=bs_tenant["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_tenant) + + def load_role(self, bs_role, branch_vars): + """Load Role objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Role {bs_role}") + + if len(bs_role["content_types"]) > 1: + _content_types = bs_role["content_types"] + _content_types.sort() + else: + _content_types = bs_role["content_types"] + try: + self.get(self.role, bs_role["name"]) + except ObjectNotFound: + new_role = self.role( + name=bs_role["name"], + weight=bs_role["weight"], + description=bs_role["description"], + color=bs_role["color"] if not None else "9e9e9e", + content_types=_content_types, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_role) + + def load_manufacturer(self, bs_manufacturer, branch_vars): + """Load Manufacturer objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Boostrap Manufacturer {bs_manufacturer}") + + try: + self.get(self.manufacturer, bs_manufacturer["name"]) + except ObjectNotFound: + new_manufacturer = self.manufacturer( + name=bs_manufacturer["name"], + description=bs_manufacturer["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_manufacturer) + + def load_platform(self, bs_platform, branch_vars): + """Load Platform objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Platform {bs_platform}") + + try: + self.get(self.platform, bs_platform["name"]) + except ObjectNotFound: + new_platform = self.platform( + name=bs_platform["name"], + manufacturer=bs_platform["manufacturer"], + network_driver=bs_platform["network_driver"], + 
napalm_driver=bs_platform["napalm_driver"], + napalm_arguments=bs_platform["napalm_arguments"], + description=bs_platform["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_platform) + + def load_location_type(self, bs_location_type, branch_vars): + """Load LocationType objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap LocationType {bs_location_type}") + + try: + self.get(self.location_type, bs_location_type["name"]) + except ObjectNotFound: + _content_types = [] + if bs_location_type["parent"]: + _parent = bs_location_type["parent"] + else: + _parent = None + if len(bs_location_type["content_types"]) > 1: + _content_types = bs_location_type["content_types"] + _content_types.sort() + new_location_type = self.location_type( + name=bs_location_type["name"], + parent=_parent, + nestable=bs_location_type["nestable"], + description=bs_location_type["description"], + content_types=_content_types, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_location_type) + + def load_location(self, bs_location, branch_vars): + """Load Location objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Location {bs_location}") + + try: + self.get(self.location, bs_location["name"]) + except ObjectNotFound: + if bs_location["parent"]: + _parent = bs_location["parent"] + else: + _parent = None + if bs_location["tenant"]: + _tenant = bs_location["tenant"] + else: + _tenant = None + new_location = self.location( + name=bs_location["name"], + location_type=bs_location["location_type"], + parent=_parent, + status=bs_location["status"], + facility=bs_location["facility"], + asn=bs_location["asn"], + time_zone=bs_location["time_zone"], + description=bs_location["description"], + tenant=_tenant, + physical_address=bs_location["physical_address"], + shipping_address=bs_location["shipping_address"], + latitude=bs_location["latitude"], + longitude=bs_location["longitude"], + contact_name=bs_location["contact_name"], + contact_phone=bs_location["contact_phone"], + contact_email=bs_location["contact_email"], + tags=bs_location["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_location) + + def load_team(self, bs_team, branch_vars): + """Load Team objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Team {bs_team}") + + if "contacts" in bs_team: + _contacts = [] + for _contact in bs_team["contacts"]: + _contacts.append(_contact) + _contacts.sort() + try: + self.get(self.team, bs_team["name"]) + except ObjectNotFound: + new_team = self.team( + name=bs_team["name"], + phone=bs_team["phone"], + email=bs_team["email"], + address=bs_team["address"], + # TODO: Need to consider how to allow loading from teams or contacts models. 
+ # contacts=_contacts, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_team) + + def load_contact(self, bs_contact, branch_vars): + """Load Contact objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Boostrap Contact {bs_contact}") + + if "teams" in bs_contact: + _teams = [] + for _team in bs_contact["teams"]: + _teams.append(_team) + _teams.sort() + try: + self.get(self.contact, bs_contact["name"]) + except ObjectNotFound: + new_contact = self.contact( + name=bs_contact["name"], + phone=bs_contact["phone"], + email=bs_contact["email"], + address=bs_contact["address"], + teams=_teams, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_contact) + + def load_provider(self, bs_provider, branch_vars): + """Load Provider objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Provider {bs_provider}") + + try: + self.get(self.provider, bs_provider["name"]) + except ObjectNotFound: + new_provider = self.provider( + name=bs_provider["name"], + asn=bs_provider["asn"], + account_number=bs_provider["account_number"], + portal_url=bs_provider["portal_url"], + noc_contact=bs_provider["noc_contact"], + admin_contact=bs_provider["admin_contact"], + tags=bs_provider["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_provider) + + def load_provider_network(self, bs_provider_network, branch_vars): + """Load ProviderNetwork objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap ProviderNetwork {bs_provider_network}") + + try: + self.get(self.provider_network, bs_provider_network["name"]) + except ObjectNotFound: + new_provider_network = self.provider_network( + name=bs_provider_network["name"], + provider=bs_provider_network["provider"], + description=bs_provider_network["description"], + comments=bs_provider_network["comments"], + tags=bs_provider_network["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_provider_network) + + def load_circuit_type(self, bs_circuit_type, branch_vars): + """Load CircuitType objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap CircuitType {bs_circuit_type} into DiffSync models.") + + try: + self.get(self.circuit_type, bs_circuit_type["name"]) + except ObjectNotFound: + new_circuit_type = self.circuit_type( + name=bs_circuit_type["name"], + description=bs_circuit_type["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_circuit_type) + + def load_circuit(self, bs_circuit, branch_vars): + """Load Circuit objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Circuit {bs_circuit} into DiffSync models.") + + try: + self.get(self.circuit, bs_circuit["circuit_id"]) + except ObjectNotFound: + new_circuit = self.circuit( + circuit_id=bs_circuit["circuit_id"], + provider=bs_circuit["provider"], + circuit_type=bs_circuit["circuit_type"], + status=bs_circuit["status"], + date_installed=bs_circuit["date_installed"], + commit_rate_kbps=bs_circuit["commit_rate_kbps"], + description=bs_circuit["description"], + tags=bs_circuit["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_circuit) + + def load_circuit_termination(self, bs_circuit_termination, branch_vars): + """Load 
CircuitTermination objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug( + f"Loading Bootstrap CircuitTermination {bs_circuit_termination} into DiffSync models." + ) + _parts = bs_circuit_termination["name"].split("__") + _circuit_id = _parts[0] + _provider = _parts[1] + _term_side = _parts[2] + try: + self.get(self.circuit_termination, bs_circuit_termination["name"]) + except ObjectNotFound: + new_circuit_termination = self.circuit_termination( + name=bs_circuit_termination["name"], + termination_type=bs_circuit_termination["termination_type"], + termination_side=_term_side, + circuit_id=_circuit_id, + location=(bs_circuit_termination["location"] if bs_circuit_termination["location"] != "" else None), + provider_network=( + bs_circuit_termination["provider_network"] + if bs_circuit_termination["provider_network"] != "" + else None + ), + port_speed_kbps=bs_circuit_termination["port_speed_kbps"], + upstream_speed_kbps=bs_circuit_termination["upstream_speed_kbps"], + cross_connect_id=bs_circuit_termination["cross_connect_id"], + patch_panel_or_ports=bs_circuit_termination["patch_panel_or_ports"], + description=bs_circuit_termination["description"], + tags=bs_circuit_termination["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_circuit_termination) + try: + _circuit = self.get(self.circuit, {"circuit_id": _circuit_id, "provider": _provider}) + _circuit.add_child(new_circuit_termination) + except ObjectAlreadyExists as err: + self.job.logger.warning(f"CircuitTermination for {_circuit} already exists. {err}") + except ObjectNotFound as err: + self.job.logger.warning(f"Circuit {_circuit_id} not found. {err}") + + def load_namespace(self, bs_namespace, branch_vars): + """Load Namespace objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Namespace {bs_namespace}.") + try: + self.get(self.namespace, bs_namespace["name"]) + except ObjectNotFound: + new_namespace = self.namespace( + name=bs_namespace["name"], + description=bs_namespace["description"], + location=bs_namespace["location"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_namespace) + + def load_rir(self, bs_rir, branch_vars): + """Load RiR objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap RiR {bs_rir}.") + try: + self.get(self.rir, bs_rir["name"]) + except ObjectNotFound: + new_rir = self.rir( + name=bs_rir["name"], + private=bs_rir["private"] if bs_rir["private"] else False, + description=bs_rir["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_rir) + + def load_vlan_group(self, bs_vlan_group, branch_vars): + """Load VLANGroup objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap VLANGroup {bs_vlan_group}.") + try: + self.get(self.vlan_group, bs_vlan_group["name"]) + except ObjectNotFound: + new_vlan_group = self.vlan_group( + name=bs_vlan_group["name"], + location=bs_vlan_group["location"], + description=bs_vlan_group["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_vlan_group) + + def load_vlan(self, bs_vlan, branch_vars): + """Load VLAN objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap VLAN {bs_vlan}.") + try: + self.get( + self.vlan, + { + "name": bs_vlan["name"], + "vid": 
bs_vlan["vid"], + "vlan_group": (bs_vlan["vlan_group"] if bs_vlan["vlan_group"] else None), + }, + ) + except ObjectNotFound: + new_vlan = self.vlan( + name=bs_vlan["name"], + vid=bs_vlan["vid"], + description=bs_vlan["description"], + status=bs_vlan["status"] if bs_vlan["status"] else "Active", + role=bs_vlan["role"] if bs_vlan["role"] else None, + locations=bs_vlan["locations"], + vlan_group=bs_vlan["vlan_group"] if bs_vlan["vlan_group"] else None, + tenant=bs_vlan["tenant"] if bs_vlan["tenant"] else None, + tags=bs_vlan["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_vlan) + + def load_vrf(self, bs_vrf, branch_vars): + """Load VRF objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap VRF {bs_vrf}.") + try: + self.get( + self.vrf, + { + "name": bs_vrf["name"], + "namespace": (bs_vrf["namespace"] if bs_vrf["namespace"] else "Global"), + }, + ) + except ObjectNotFound: + new_vrf = self.vrf( + name=bs_vrf["name"], + namespace=bs_vrf["namespace"] if bs_vrf["namespace"] else "Global", + route_distinguisher=bs_vrf["route_distinguisher"], + description=bs_vrf["description"], + tenant=bs_vrf["tenant"] if bs_vrf["tenant"] else None, + tags=bs_vrf["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_vrf) + + def load_prefix(self, bs_prefix, branch_vars): + """Load Prefix objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Prefix {bs_prefix}.") + try: + self.get( + self.prefix, + { + "network": {bs_prefix["network"]}, + "namespace": {bs_prefix["namespace"] if bs_prefix["namespace"] else "Global"}, + }, + ) + except ObjectNotFound: + _date_allocated = None + if "date_allocated" in bs_prefix and bs_prefix["date_allocated"]: + if isinstance(bs_prefix["date_allocated"], (datetime.date, datetime.datetime)): + _date_allocated = bs_prefix["date_allocated"] + if isinstance(_date_allocated, datetime.date) and not isinstance( + _date_allocated, datetime.datetime + ): + _date_allocated = datetime.datetime.combine(_date_allocated, datetime.time.min) + else: + try: + _date_allocated = datetime.datetime.strptime(bs_prefix["date_allocated"], "%Y-%m-%d %H:%M:%S") + except (TypeError, ValueError): + try: + _date_allocated = datetime.datetime.strptime(bs_prefix["date_allocated"], "%Y-%m-%d") + _date_allocated = _date_allocated.replace(hour=0, minute=0, second=0) + except (TypeError, ValueError): + _date_allocated = None + self.job.logger.warning( + f"Invalid date format for date_allocated: {bs_prefix['date_allocated']}" + ) + new_prefix = self.prefix( + network=bs_prefix["network"], + namespace=(bs_prefix["namespace"] if bs_prefix["namespace"] else "Global"), + prefix_type=(bs_prefix["prefix_type"] if bs_prefix["prefix_type"] else "Network"), + status=bs_prefix["status"] if bs_prefix["status"] else "Active", + role=bs_prefix["role"] if bs_prefix["role"] else None, + rir=bs_prefix["rir"] if bs_prefix["rir"] else None, + date_allocated=_date_allocated, + description=bs_prefix["description"], + vrfs=bs_prefix["vrfs"] if bs_prefix["vrfs"] else None, + locations=bs_prefix["locations"] if bs_prefix["locations"] else None, + vlan=bs_prefix["vlan"] if bs_prefix["vlan"] else None, + tenant=bs_prefix["tenant"] if bs_prefix["tenant"] else None, + tags=bs_prefix["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_prefix) + + def load_secret(self, bs_secret, branch_vars): + """Load Secret 
objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Secret: {bs_secret}") + if bs_secret["provider"] == "environment-variable": + params = {"variable": bs_secret["parameters"]["variable"]} + elif bs_secret["provider"] == "text-file": + params = {"variable": bs_secret["parameters"]["path"]} + else: + self.job.logger.warning(f"Secret: {bs_secret} is not formatted correctly in the yaml file.") + return + + try: + self.get(self.secret, bs_secret["name"]) + except ObjectNotFound: + new_secret = self.secret( + name=bs_secret["name"], + provider=bs_secret["provider"], + parameters=params, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_secret) + + def load_secrets_group(self, bs_sg, branch_vars): + """Load SecretsGroup objects from Bootstrap into DiffSync models.""" + _secrets = [] + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap SecretsGroup: {bs_sg}") + try: + self.get(self.secrets_group, bs_sg["name"]) + except ObjectNotFound: + for _sec in bs_sg["secrets"]: + _secrets.append(_sec) + _secrets = sorted(_secrets, key=lambda x: x["name"]) + new_secrets_group = self.secrets_group( + name=bs_sg["name"], + secrets=_secrets, + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_secrets_group) + + def load_git_repository(self, git_repo, branch_vars): + """Load GitRepository objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap GitRepository: {git_repo}") + try: + self.get(self.git_repository, git_repo["name"]) + except ObjectNotFound: + _data_types = [] + for con_type in git_repo["provided_data_type"]: + _content_type = lookup_content_type(content_model_path="extras.gitrepository", content_type=con_type) + _data_types.append(_content_type) + if git_repo.get("branch"): + _branch = git_repo["branch"] + else: + _branch = branch_vars["git_branch"] + new_git_repository = self.git_repository( + name=git_repo["name"], + url=git_repo["url"], + branch=_branch, + provided_contents=_data_types, + secrets_group=git_repo["secrets_group_name"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_git_repository) + _data_types.clear() + + def load_dynamic_group(self, dyn_group): + """Load DynamicGroup objects from Bootstrap into DiffSync models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap DynamicGroup: {dyn_group}") + try: + self.get(self.dynamic_group, dyn_group["name"]) + except ObjectNotFound: + new_dynamic_group = self.dynamic_group( + name=dyn_group["name"], + content_type=dyn_group["content_type"], + dynamic_filter=json.loads(dyn_group["filter"]), + description=dyn_group["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(new_dynamic_group) + + def load_computed_field(self, comp_field): + """Load ComputedField objects from Bootstrap into DiffSync Models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap ComputedField: {comp_field}") + try: + self.get(self.computed_field, comp_field["label"]) + except ObjectNotFound: + _new_comp_field = self.computed_field( + label=comp_field["label"], + content_type=comp_field["content_type"], + template=comp_field["template"], + ) + self.add(_new_comp_field) + + def load_tag(self, tag): + """Load Tag objects from Bootstrap into DiffSync Models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Tag: {tag}") + if len(tag["content_types"]) > 1: + 
_content_types = tag["content_types"] + _content_types.sort() + else: + _content_types = tag["content_types"] + try: + self.get(self.tag, tag["name"]) + except ObjectNotFound: + _new_tag = self.tag( + name=tag["name"], + color=tag["color"] if tag["color"] else "9e9e9e", + content_types=_content_types, + description=tag["description"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(_new_tag) + + def load_graph_ql_query(self, query): + """Load GraphQLQuery objects from Bootstrap into DiffSync Models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap GraphQLQuery {query}") + try: + self.get(self.graph_ql_query, query["name"]) + except ObjectNotFound: + _new_graphqlq = self.graph_ql_query(name=query["name"], query=query["query"]) + self.add(_new_graphqlq) + + def load_software(self, software): + """Load Software objects from Bootstrap into DiffSync Models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap Software {software}") + try: + self.get( + self.software, + { + "version": software["version"], + "platform": software["device_platform"], + }, + ) + except ObjectNotFound: + try: + _release_date = datetime.datetime.strptime(software["release_date"], "%Y-%m-%d") + except (TypeError, ValueError): + _release_date = None + try: + _eos_date = datetime.datetime.strptime(software["eos_date"], "%Y-%m-%d") + except (TypeError, ValueError): + _eos_date = None + if software["documentation_url"] is None: + _documentation_url = "" + else: + _documentation_url = software["documentation_url"] + _new_software = self.software( + version=software["version"], + platform=software["device_platform"], + alias=software["alias"] if software["alias"] else "", + release_date=_release_date, + eos_date=_eos_date, + documentation_url=_documentation_url, + long_term_support=software["lts"], + pre_release=software["pre_release"], + tags=software["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(_new_software) + + def load_software_image(self, software_image): + """Load SoftwareImage objects from Bootstrap into DiffSync Models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap SoftwareImage {software_image}") + try: + self.get(self.software_image, software_image["file_name"]) + except ObjectNotFound: + _new_software_image = self.software_image( + software=f'{software_image["platform"]} - {software_image["software_version"]}', + platform=software_image["platform"], + software_version=software_image["software_version"], + file_name=software_image["file_name"], + download_url=software_image["download_url"], + image_file_checksum=software_image["image_file_checksum"], + hashing_algorithm=software_image["hashing_algorithm"], + default_image=software_image["default_image"] if software_image["default_image"] is not None else False, + tags=software_image["tags"], + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(_new_software_image) + + def load_validated_software(self, validated_software): + """Load ValidatedSoftware objects from Bootstrap into DiffSync Models.""" + if self.job.debug: + self.job.logger.debug(f"Loading Bootstrap ValidatedSoftware {validated_software}") + try: + self.get( + self.validated_software, + { + "software": validated_software["software"], + "valid_since": validated_software["valid_since"], + "valid_until": validated_software["valid_until"], + }, + ) + except ObjectNotFound: + _new_validated_software = self.validated_software( + software=validated_software["software"], + software_version=validated_software["software"].split(" - 
", 1)[1], + platform=validated_software["software"].split(" - ", 1)[0], + valid_since=validated_software["valid_since"], + valid_until=validated_software["valid_until"], + preferred_version=validated_software["preferred_version"], + devices=sorted(validated_software["devices"]), + device_types=sorted(validated_software["device_types"]), + device_roles=sorted(validated_software["device_roles"]), + inventory_items=sorted(validated_software["inventory_items"]), + object_tags=sorted(validated_software["object_tags"]), + tags=sorted(validated_software["tags"]), + system_of_record=os.getenv("SYSTEM_OF_RECORD", "Bootstrap"), + ) + self.add(_new_validated_software) + + def load(self): + """Load data from Bootstrap into DiffSync models.""" + environment_label = settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_nautobot_environment_branch"] + + if is_running_tests(): + load_type = "file" + elif self.job.load_source == "env_var": + load_type = os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE", "file") + else: + load_type = self.job.load_source + + global global_settings + global_settings = None + + if load_type == "file": + directory_path = "nautobot_ssot/integrations/bootstrap/fixtures" + # generates a variable for each file in fixtures named the same as the file name less .yaml + for filename in os.listdir(directory_path): + if filename.endswith(".yaml") or filename.endswith(".yml"): + with open(os.path.join(directory_path, filename), "r") as file: + yaml_data = yaml.safe_load(file) + variable_name = os.path.splitext(filename)[0] + globals()[variable_name] = yaml_data + + branch_vars = globals()[environment_label] + global_settings = globals().get("global_settings") + + elif load_type == "git": + repo = GitRepository.objects.filter( + name__icontains="Bootstrap", + provided_contents__icontains="extras.configcontext", + ) + if len(repo) == 0: + self.job.logger.warning( + "Unable to find Bootstrap SSoT Repository configured in Nautobot, please ensure a git repository with a name containing 'Bootstrap' is present and provides 'configcontext' type." + ) + else: + repo = repo[0] + if self.job.debug: + self.job.logger.debug(f"Sync the {repo.name} GitRepository.") + ensure_git_repository(repository_record=repo) + self.job.logger.info(f"Parsing the {repo.name} GitRepository.") + os.chdir(f"{repo.filesystem_path}") + directory_path = "./" + # generates a variable for each file in fixtures named the same as the file name less .yaml + for filename in os.listdir("./"): + if filename.endswith(".yaml") or filename.endswith(".yml"): + with open(os.path.join(directory_path, filename), "r") as file: + yaml_data = yaml.safe_load(file) + variable_name = os.path.splitext(filename)[0] + globals()[variable_name] = yaml_data + + branch_vars = globals()[environment_label] + global_settings = globals().get("global_settings") + + # Ensure global_settings is loaded + if global_settings is None: + self.job.logger.error("global_settings not loaded. 
Check if the file exists in the correct directory.") + return + + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tenant_group"]: + if global_settings["tenant_group"] is not None: # noqa: F821 + for bs_tenant_group in global_settings["tenant_group"]: # noqa: F821 + self.load_tenant_group(bs_tenant_group=bs_tenant_group, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tenant"]: + if global_settings["tenant"] is not None: # noqa: F821 + for bs_tenant in global_settings["tenant"]: # noqa: F821 + self.load_tenant(bs_tenant=bs_tenant, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["role"]: + if global_settings["role"] is not None: # noqa: F821 + for bs_role in global_settings["role"]: # noqa: F821 + self.load_role(bs_role=bs_role, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["manufacturer"]: + if global_settings["manufacturer"] is not None: # noqa: F821 + for bs_manufacturer in global_settings["manufacturer"]: # noqa: F821 + self.load_manufacturer(bs_manufacturer=bs_manufacturer, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["platform"]: + if global_settings["platform"] is not None: # noqa: F821 + for bs_platform in global_settings["platform"]: # noqa: F821 + self.load_platform(bs_platform=bs_platform, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["location_type"]: + if global_settings["location_type"] is not None: # noqa: F821 + for bs_location_type in global_settings["location_type"]: # noqa: F821 + self.load_location_type(bs_location_type=bs_location_type, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["location"]: + if global_settings["location"] is not None: # noqa: F821 + for bs_location in global_settings["location"]: # noqa: F821 + self.load_location(bs_location=bs_location, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["team"]: + if global_settings["team"] is not None: # noqa: F821 + for bs_team in global_settings["team"]: # noqa: F821 + self.load_team(bs_team=bs_team, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["contact"]: + if global_settings["contact"] is not None: # noqa: F821 + for bs_contact in global_settings["contact"]: # noqa: F821 + self.load_contact(bs_contact=bs_contact, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["provider"]: + if global_settings["provider"] is not None: # noqa: F821 + for bs_provider in global_settings["provider"]: # noqa: F821 + self.load_provider(bs_provider=bs_provider, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["provider_network"]: + if global_settings["provider_network"] is not None: # noqa: F821 + for bs_provider_network in global_settings["provider_network"]: # noqa: F821 + self.load_provider_network(bs_provider_network=bs_provider_network, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit_type"]: + if global_settings["circuit_type"] is not None: # noqa: F821 + for bs_circuit_type in global_settings["circuit_type"]: # noqa: F821 + self.load_circuit_type(bs_circuit_type=bs_circuit_type, branch_vars=branch_vars) + if 
settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit"]: + if global_settings["circuit"] is not None: # noqa: F821 + for bs_circuit in global_settings["circuit"]: # noqa: F821 + self.load_circuit(bs_circuit=bs_circuit, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit_termination"]: + if global_settings["circuit_termination"] is not None: # noqa: F821 + for bs_circuit_termination in global_settings["circuit_termination"]: # noqa: F821 + self.load_circuit_termination( + bs_circuit_termination=bs_circuit_termination, + branch_vars=branch_vars, + ) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["namespace"]: + if global_settings["namespace"] is not None: # noqa: F821 + for bs_namespace in global_settings["namespace"]: # noqa: F821 + self.load_namespace(bs_namespace=bs_namespace, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["rir"]: + if global_settings["rir"] is not None: # noqa: F821 + for bs_rir in global_settings["rir"]: # noqa: F821 + self.load_rir(bs_rir=bs_rir, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vlan_group"]: + if global_settings["vlan_group"] is not None: # noqa: F821 + for bs_vlan_group in global_settings["vlan_group"]: # noqa: F821 + self.load_vlan_group(bs_vlan_group=bs_vlan_group, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vlan"]: + if global_settings["vlan"] is not None: # noqa: F821 + for bs_vlan in global_settings["vlan"]: # noqa: F821 + self.load_vlan(bs_vlan=bs_vlan, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vrf"]: + if global_settings["vrf"] is not None: # noqa: F821 + for bs_vrf in global_settings["vrf"]: # noqa: F821 + self.load_vrf(bs_vrf=bs_vrf, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["prefix"]: + if global_settings["prefix"] is not None: # noqa: F821 + for bs_prefix in global_settings["prefix"]: # noqa: F821 + self.load_prefix(bs_prefix=bs_prefix, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["secret"]: + if global_settings["secret"] is not None: # noqa: F821 + for bs_secret in global_settings["secret"]: # noqa: F821 + self.load_secret(bs_secret=bs_secret, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["secrets_group"]: + if global_settings["secrets_group"] is not None: # noqa: F821 + for bs_sg in global_settings["secrets_group"]: # noqa: F821 + self.load_secrets_group(bs_sg=bs_sg, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["git_repository"]: + if global_settings["git_repository"] is not None: # noqa: F821 + for git_repo in global_settings["git_repository"]: # noqa: F821 + self.load_git_repository(git_repo=git_repo, branch_vars=branch_vars) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["dynamic_group"]: + if global_settings["dynamic_group"] is not None: # noqa: F821 + for dyn_group in global_settings["dynamic_group"]: # noqa: F821 + self.load_dynamic_group(dyn_group=dyn_group) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["computed_field"]: + if global_settings["computed_field"] is not None: # noqa: F821 + for computed_field in global_settings["computed_field"]: # 
noqa: F821 + self.load_computed_field(comp_field=computed_field) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tag"]: + if global_settings["tag"] is not None: # noqa: F821 + for tag in global_settings["tag"]: # noqa: F821 + self.load_tag(tag=tag) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["graph_ql_query"]: + if global_settings["graph_ql_query"] is not None: # noqa F821 + for graph_ql_query in global_settings["graph_ql_query"]: # noqa F821 + self.load_graph_ql_query(query=graph_ql_query) + if LIFECYCLE_MGMT: + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["software"]: + for software in global_settings["software"]: # noqa: F821 + self.load_software(software=software) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["software_image"]: + for software_image in global_settings["software_image"]: # noqa: F821 + self.load_software_image(software_image=software_image) + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["validated_software"]: + for validated_software in global_settings["validated_software"]: # noqa: F821 + self.load_validated_software(validated_software=validated_software) diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py new file mode 100755 index 000000000..4a07b4052 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/adapters/nautobot.py @@ -0,0 +1,1359 @@ +"""Nautobot Adapter for bootstrap SSoT plugin.""" + +from diffsync import Adapter +from diffsync.enum import DiffSyncModelFlags +from diffsync.exceptions import ObjectAlreadyExists, ObjectNotFound +from django.conf import settings +from nautobot.circuits.models import ( + Circuit, + CircuitTermination, + CircuitType, + Provider, + ProviderNetwork, +) +from nautobot.dcim.models import ( + Location, + LocationType, + Manufacturer, + Platform, +) +from nautobot.extras.models import ( + ComputedField, + Contact, + DynamicGroup, + GitRepository, + GraphQLQuery, + Role, + Secret, + SecretsGroup, + Status, + Tag, + Team, +) +from nautobot.ipam.models import ( + RIR, + VLAN, + VRF, + Namespace, + Prefix, + VLANGroup, +) +from nautobot.tenancy.models import Tenant, TenantGroup + +from nautobot_ssot.integrations.bootstrap.diffsync.models.nautobot import ( + NautobotCircuit, + NautobotCircuitTermination, + NautobotCircuitType, + NautobotComputedField, + NautobotContact, + NautobotDynamicGroup, + NautobotGitRepository, + NautobotGraphQLQuery, + NautobotLocation, + NautobotLocationType, + NautobotManufacturer, + NautobotNamespace, + NautobotPlatform, + NautobotPrefix, + NautobotProvider, + NautobotProviderNetwork, + NautobotRiR, + NautobotRole, + NautobotSecret, + NautobotSecretsGroup, + NautobotTag, + NautobotTeam, + NautobotTenant, + NautobotTenantGroup, + NautobotVLAN, + NautobotVLANGroup, + NautobotVRF, +) +from nautobot_ssot.integrations.bootstrap.utils import ( + check_sor_field, + get_sor_field_nautobot_object, + lookup_content_type_model_path, + lookup_model_for_role_id, + lookup_model_for_taggable_class_id, +) +from nautobot_ssot.integrations.bootstrap.utils.nautobot import ( + get_prefix_location_assignments, + get_vrf_prefix_assignments, +) + +try: + import nautobot_device_lifecycle_mgmt # noqa: F401 + + LIFECYCLE_MGMT = True +except ImportError: + LIFECYCLE_MGMT = False + +if LIFECYCLE_MGMT: + # noqa: F401 + from nautobot_device_lifecycle_mgmt.models import ( + SoftwareImageLCM as 
ORMSoftwareImage, + ) + from nautobot_device_lifecycle_mgmt.models import ( + SoftwareLCM as ORMSoftware, + ) + + # noqa: F401 + from nautobot_device_lifecycle_mgmt.models import ( + ValidatedSoftwareLCM as ORMValidatedSoftware, + ) + + # noqa: F401 + from nautobot_ssot.integrations.bootstrap.diffsync.models.nautobot import ( # noqa: F401 + NautobotSoftware, + NautobotSoftwareImage, + NautobotValidatedSoftware, + ) + + +class NautobotAdapter(Adapter): + """DiffSync adapter for Nautobot.""" + + tenant_group = NautobotTenantGroup + tenant = NautobotTenant + role = NautobotRole + manufacturer = NautobotManufacturer + platform = NautobotPlatform + location_type = NautobotLocationType + location = NautobotLocation + team = NautobotTeam + contact = NautobotContact + provider = NautobotProvider + provider_network = NautobotProviderNetwork + circuit_type = NautobotCircuitType + circuit = NautobotCircuit + circuit_termination = NautobotCircuitTermination + namespace = NautobotNamespace + rir = NautobotRiR + vlan_group = NautobotVLANGroup + vlan = NautobotVLAN + vrf = NautobotVRF + prefix = NautobotPrefix + secret = NautobotSecret + secrets_group = NautobotSecretsGroup + git_repository = NautobotGitRepository + dynamic_group = NautobotDynamicGroup + computed_field = NautobotComputedField + tag = NautobotTag + graph_ql_query = NautobotGraphQLQuery + + if LIFECYCLE_MGMT: + software = NautobotSoftware + software_image = NautobotSoftwareImage + validated_software = NautobotValidatedSoftware + + top_level = [ + "tag", + "tenant_group", + "tenant", + "role", + "manufacturer", + "platform", + "location_type", + "location", + "team", + "contact", + "provider", + "provider_network", + "circuit_type", + "circuit", + "namespace", + "rir", + "vlan_group", + "vlan", + "vrf", + "prefix", + "secret", + "secrets_group", + "git_repository", + "dynamic_group", + "computed_field", + "graph_ql_query", + ] + + if LIFECYCLE_MGMT: + top_level.append("software") + top_level.append("software_image") + top_level.append("validated_software") + + def __init__(self, *args, job=None, sync=None, **kwargs): # noqa: D417 + """Initialize Nautobot. + + Args: + job (object, optional): Nautobot job. Defaults to None. + sync (object, optional): Nautobot DiffSync. Defaults to None. 
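+
+ Example:
+ A minimal usage sketch (hypothetical job context; assumes ``self`` is a running SSoT job):
+
+ adapter = NautobotAdapter(job=self, sync=self.sync)
+ adapter.load_tenant_group()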
+ """ + super().__init__(*args, **kwargs) + self.job = job + self.sync = sync + + def load_tenant_group(self): + """Method to load TenantGroup objects from Nautobot into NautobotTenantGroup DiffSync models.""" + for nb_tenant_group in TenantGroup.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot TenantGroup: {nb_tenant_group}, with ID: {nb_tenant_group.id}") + try: + self.get(self.tenant_group, nb_tenant_group.name) + except ObjectNotFound: + try: + _parent = nb_tenant_group.parent.name + except AttributeError: + _parent = "" + _sor = "" + if "system_of_record" in nb_tenant_group.custom_field_data: + _sor = ( + nb_tenant_group.custom_field_data["system_of_record"] + if nb_tenant_group.custom_field_data["system_of_record"] is not None + else "" + ) + new_tenant_group = self.tenant_group( + name=nb_tenant_group.name, + parent=_parent, + description=nb_tenant_group.description, + system_of_record=_sor, + uuid=nb_tenant_group.id, + ) + self.job.logger.info(f"Loading Nautobot Tenant Group - {nb_tenant_group.name}") + + if not check_sor_field(nb_tenant_group): + new_tenant_group.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_tenant_group) + + def load_tenant(self): + """Method to load Tenant objects from Nautobot into NautobotTenant DiffSync models.""" + for nb_tenant in Tenant.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Tenant: {nb_tenant}, with ID: {nb_tenant.id}") + _tags = sorted(list(nb_tenant.tags.all().values_list("name", flat=True))) + try: + self.get(self.tenant, nb_tenant.name) + except ObjectNotFound: + try: + _tenant_group = nb_tenant.tenant_group.name + except AttributeError: + _tenant_group = None + _sor = "" + if "system_of_record" in nb_tenant.custom_field_data: + _sor = ( + nb_tenant.custom_field_data["system_of_record"] + if nb_tenant.custom_field_data["system_of_record"] is not None + else "" + ) + new_tenant = self.tenant( + name=nb_tenant.name, + tenant_group=_tenant_group, + description=nb_tenant.description, + tags=_tags, + system_of_record=_sor, + uuid=nb_tenant.id, + ) + self.job.logger.info(f"Loading Nautobot Tenant - {nb_tenant.name}") + + if not check_sor_field(nb_tenant): + new_tenant.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_tenant) + + def load_role(self): + """Method to load Role objects from Nautobot into NautobotRole DiffSync models.""" + for nb_role in Role.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Role: {nb_role}, with ID {nb_role.id}") + try: + self.get(self.role, nb_role.name) + except ObjectNotFound: + _content_types = [] + _content_uuids = nb_role.content_types.values_list("model", "id") + for _uuid in _content_uuids: + _content_types.append(lookup_model_for_role_id(_uuid[1])) + _content_types.sort() + _sor = "" + if "system_of_record" in nb_role.custom_field_data: + _sor = ( + nb_role.custom_field_data["system_of_record"] + if nb_role.custom_field_data["system_of_record"] is not None + else "" + ) + new_role = self.role( + name=nb_role.name, + weight=nb_role.weight, + description=nb_role.description, + color=nb_role.color, + content_types=_content_types, + system_of_record=_sor, + ) + + if not check_sor_field(nb_role): + new_role.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_role) + _content_types.clear() + + def load_manufacturer(self): + """Method to load Manufacturer objects from Nautobot into NautobotManufacturer DiffSync models.""" + for nb_manufacturer in 
Manufacturer.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Manufacturer: {nb_manufacturer}, with ID {nb_manufacturer.id}") + try: + self.get(self.manufacturer, nb_manufacturer.name) + except ObjectNotFound: + _sor = "" + if "system_of_record" in nb_manufacturer.custom_field_data: + _sor = ( + nb_manufacturer.custom_field_data["system_of_record"] + if nb_manufacturer.custom_field_data["system_of_record"] is not None + else "" + ) + new_manufacturer = self.manufacturer( + name=nb_manufacturer.name, + description=nb_manufacturer.description, + uuid=nb_manufacturer.id, + system_of_record=_sor, + ) + + if not check_sor_field(nb_manufacturer): + new_manufacturer.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_manufacturer) + + def load_platform(self): + """Method to load Platform objects from Nautobot into NautobotPlatform DiffSync models.""" + for nb_platform in Platform.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Platform: {nb_platform}, with ID {nb_platform.id}") + try: + self.get(self.platform, nb_platform.name) + except ObjectNotFound: + if isinstance(nb_platform.napalm_args, str): + _napalm_args = {} + else: + _napalm_args = nb_platform.napalm_args + _sor = "" + if "system_of_record" in nb_platform.custom_field_data: + _sor = ( + nb_platform.custom_field_data["system_of_record"] + if nb_platform.custom_field_data["system_of_record"] is not None + else "" + ) + new_platform = self.platform( + name=nb_platform.name, + manufacturer=nb_platform.manufacturer.name, + network_driver=nb_platform.network_driver, + napalm_driver=nb_platform.napalm_driver, + napalm_arguments=_napalm_args, + description=nb_platform.description, + system_of_record=_sor, + uuid=nb_platform.id, + ) + + if not check_sor_field(nb_platform): + new_platform.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_platform) + + def load_location_type(self): + """Method to load LocationType objects from Nautobot into NautobotLocationType DiffSync models.""" + for nb_location_type in LocationType.objects.all(): + if self.job.debug: + self.job.logger.debug( + f"Loading Nautobot LocationType: {nb_location_type}, with ID {nb_location_type.id}" + ) + try: + self.get(self.location_type, nb_location_type.name) + except ObjectNotFound: + _content_types = [] + _content_uuids = nb_location_type.content_types.values_list("id", flat=True) + if nb_location_type.parent is not None: + _parent = nb_location_type.parent.name + else: + _parent = None + for _uuid in _content_uuids: + _content_types.append(lookup_content_type_model_path(nb_model="locations", content_id=_uuid)) + if len(_content_types) > 1: + try: + _content_types.sort() + except TypeError: + self.job.logger.warning( + f"One of your content types is not able to be associated with LocationType {nb_location_type}. Please check and try again. 
{_content_types}" + ) + _sor = "" + if "system_of_record" in nb_location_type.custom_field_data: + _sor = ( + nb_location_type.custom_field_data["system_of_record"] + if nb_location_type.custom_field_data["system_of_record"] is not None + else "" + ) + new_location_type = self.location_type( + name=nb_location_type.name, + parent=_parent, + nestable=nb_location_type.nestable if not None else False, + description=nb_location_type.description, + content_types=_content_types, + system_of_record=_sor, + uuid=nb_location_type.id, + ) + + if not check_sor_field(nb_location_type): + new_location_type.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_location_type) + _content_types.clear() + + def load_location(self): + """Method to load Location objects from Nautobot into NautobotLocation DiffSync models.""" + for nb_location in Location.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Location: {nb_location}, with ID {nb_location.id}") + try: + self.get(self.location, nb_location.name) + except ObjectNotFound: + _tags = [] + if nb_location.parent is not None: + _parent = nb_location.parent.name + else: + _parent = None + if nb_location.time_zone is not None: + try: + _time_zone = nb_location.time_zone.zone + except AttributeError: + _time_zone = nb_location.time_zone + else: + _time_zone = None + if nb_location.tenant is not None: + _tenant = nb_location.tenant.name + else: + _tenant = None + if nb_location.tags is not None: + for _tag in nb_location.tags.values_list("name", flat=True): + _tags.append(_tag) + _sor = "" + if "system_of_record" in nb_location.custom_field_data: + _sor = ( + nb_location.custom_field_data["system_of_record"] + if nb_location.custom_field_data["system_of_record"] is not None + else "" + ) + new_location = self.location( + name=nb_location.name, + location_type=nb_location.location_type.name, + parent=_parent, + status=nb_location.status.name, + facility=nb_location.facility, + asn=nb_location.asn, + time_zone=str(_time_zone), + description=nb_location.description, + tenant=_tenant, + physical_address=nb_location.physical_address, + shipping_address=nb_location.shipping_address, + latitude=nb_location.latitude, + longitude=nb_location.longitude, + contact_name=nb_location.contact_name, + contact_phone=nb_location.contact_phone, + contact_email=nb_location.contact_email, + tags=_tags, + system_of_record=_sor, + uuid=nb_location.id, + ) + + if not check_sor_field(nb_location): + new_location.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_location) + + def load_team(self): + """Method to load Team objects from Nautobot into NautobotTeam DiffSync models.""" + for nb_team in Team.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Team: {nb_team}, with ID: {nb_team.id}") + try: + self.get(self.team, nb_team.name) + except ObjectNotFound: + if nb_team.contacts is not None: + _contacts = [] + for _contact in nb_team.contacts.values_list("name", flat=True): + _contacts.append(_contact) + _contacts.sort() + _sor = "" + if "system_of_record" in nb_team.custom_field_data: + _sor = ( + nb_team.custom_field_data["system_of_record"] + if nb_team.custom_field_data["system_of_record"] is not None + else "" + ) + new_team = self.team( + name=nb_team.name, + phone=nb_team.phone, + email=nb_team.email, + address=nb_team.address, + # TODO: Need to consider how to allow loading from teams or contacts models. 
+ # contacts=_contacts, + system_of_record=_sor, + uuid=nb_team.id, + ) + + if not check_sor_field(nb_team): + new_team.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_team) + + def load_contact(self): + """Method to load Contact Objects from Nautobot into NautobotContact DiffSync models.""" + for nb_contact in Contact.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot contact: {nb_contact}, with ID: {nb_contact.id}") + try: + self.get(self.contact, nb_contact.name) + except ObjectNotFound: + if nb_contact.teams is not None: + _teams = [] + for _team in nb_contact.teams.values_list("name", flat=True): + _teams.append(_team) + _teams.sort() + _sor = "" + if "system_of_record" in nb_contact.custom_field_data: + _sor = ( + nb_contact.custom_field_data["system_of_record"] + if nb_contact.custom_field_data["system_of_record"] is not None + else "" + ) + new_contact = self.contact( + name=nb_contact.name, + phone=nb_contact.phone, + email=nb_contact.email, + address=nb_contact.address, + teams=_teams, + system_of_record=_sor, + uuid=nb_contact.id, + ) + + if not check_sor_field(nb_contact): + new_contact.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_contact) + + def load_provider(self): + """Method to load Provider objects from Nautobot into NautobotProvider DiffSync models.""" + for nb_provider in Provider.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Provider: {nb_provider}, with ID {nb_provider.id}") + try: + self.get(self.provider, nb_provider.name) + except ObjectNotFound: + if nb_provider.tags is not None: + _tags = [] + for _tag in nb_provider.tags.values_list("name", flat=True): + _tags.append(_tag) + _tags.sort() + else: + _tags = None + _sor = "" + if "system_of_record" in nb_provider.custom_field_data: + _sor = ( + nb_provider.custom_field_data["system_of_record"] + if nb_provider.custom_field_data["system_of_record"] is not None + else "" + ) + new_provider = self.provider( + name=nb_provider.name, + asn=nb_provider.asn, + account_number=nb_provider.account, + portal_url=nb_provider.portal_url, + noc_contact=nb_provider.noc_contact, + admin_contact=nb_provider.admin_contact, + tags=_tags, + system_of_record=_sor, + uuid=nb_provider.id, + ) + + if not check_sor_field(nb_provider): + new_provider.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_provider) + + def load_provider_network(self): + """Method to load ProviderNetwork objects from Nautobot into NautobotProviderNetwork DiffSync models.""" + for nb_provider_network in ProviderNetwork.objects.all(): + if self.job.debug: + self.job.logger.debug( + f"Loading Nautobot ProviderNetwork: {nb_provider_network}, with ID {nb_provider_network.id}" + ) + try: + self.get(self.provider_network, nb_provider_network.name) + except ObjectNotFound: + if nb_provider_network.tags is not None: + _tags = [] + for _tag in nb_provider_network.tags.values_list("name", flat=True): + _tags.append(_tag) + _tags.sort() + else: + _tags = None + _sor = "" + if "system_of_record" in nb_provider_network.custom_field_data: + _sor = ( + nb_provider_network.custom_field_data["system_of_record"] + if nb_provider_network.custom_field_data["system_of_record"] is not None + else "" + ) + new_provider_network = self.provider_network( + name=nb_provider_network.name, + provider=nb_provider_network.provider.name, + description=nb_provider_network.description, + comments=nb_provider_network.comments, + tags=_tags, + system_of_record=_sor, + 
uuid=nb_provider_network.id, + ) + + if not check_sor_field(nb_provider_network): + new_provider_network.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_provider_network) + + def load_circuit_type(self): + """Method to load CircuitType objects from Nautobot into NautobotCircuitType DiffSync models.""" + for nb_circuit_type in CircuitType.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot CircuitType: {nb_circuit_type}, with ID {nb_circuit_type.id}") + try: + self.get(self.circuit_type, nb_circuit_type.name) + except ObjectNotFound: + _sor = "" + if "system_of_record" in nb_circuit_type.custom_field_data: + _sor = ( + nb_circuit_type.custom_field_data["system_of_record"] + if nb_circuit_type.custom_field_data["system_of_record"] is not None + else "" + ) + new_circuit_type = self.circuit_type( + name=nb_circuit_type.name, + description=nb_circuit_type.description, + system_of_record=_sor, + uuid=nb_circuit_type.id, + ) + + if not check_sor_field(nb_circuit_type): + new_circuit_type.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_circuit_type) + + def load_circuit(self): + """Method to load Circuit objects from Nautobot into NautobotCircuit DiffSync models.""" + for nb_circuit in Circuit.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Circuit: {nb_circuit}, with ID {nb_circuit.id}") + try: + self.get(self.circuit, nb_circuit.cid) + except ObjectNotFound: + if nb_circuit.tags is not None: + _tags = [] + for _tag in nb_circuit.tags.values_list("name", flat=True): + _tags.append(_tag) + _tags.sort() + else: + _tags = None + _sor = "" + if "system_of_record" in nb_circuit.custom_field_data: + _sor = ( + nb_circuit.custom_field_data["system_of_record"] + if nb_circuit.custom_field_data["system_of_record"] is not None + else "" + ) + new_circuit = self.circuit( + circuit_id=nb_circuit.cid, + provider=nb_circuit.provider.name, + circuit_type=nb_circuit.circuit_type.name, + status=nb_circuit.status.name, + date_installed=nb_circuit.install_date, + commit_rate_kbps=nb_circuit.commit_rate, + description=nb_circuit.description, + tenant=(nb_circuit.tenant.name if nb_circuit.tenant is not None else None), + tags=_tags, + system_of_record=_sor, + uuid=nb_circuit.id, + ) + + if not check_sor_field(nb_circuit): + new_circuit.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_circuit) + + def load_circuit_termination(self): + """Method to load CircuitTermination objects from Nautobot into NautobotCircuitTermination DiffSync models.""" + for nb_circuit_termination in CircuitTermination.objects.all(): + if self.job.debug: + self.job.logger.debug( + f"Loading Nautobot CircuitTermination {nb_circuit_termination}, with ID: {nb_circuit_termination.id}" + ) + _term_name = f"{nb_circuit_termination.circuit.cid}__{nb_circuit_termination.circuit.provider.name}__{nb_circuit_termination.term_side}" + try: + self.get(self.circuit_termination, _term_name) + except ObjectNotFound: + if nb_circuit_termination.tags is not None: + _tags = [] + for _tag in nb_circuit_termination.tags.values_list("name", flat=True): + _tags.append(_tag) + _tags.sort() + else: + _tags = None + _sor = "" + if "system_of_record" in nb_circuit_termination.custom_field_data: + _sor = ( + nb_circuit_termination.custom_field_data["system_of_record"] + if nb_circuit_termination.custom_field_data["system_of_record"] is not None + else "" + ) + if nb_circuit_termination.provider_network: + _termination_type = "Provider Network" + if 
nb_circuit_termination.location: + _termination_type = "Location" + new_circuit_termination = self.circuit_termination( + name=_term_name, + termination_type=_termination_type, + termination_side=nb_circuit_termination.term_side, + circuit_id=nb_circuit_termination.circuit.cid, + provider_network=( + nb_circuit_termination.provider_network.name + if nb_circuit_termination.provider_network is not None + else None + ), + location=( + nb_circuit_termination.location.name if nb_circuit_termination.location is not None else None + ), + port_speed_kbps=nb_circuit_termination.port_speed, + upstream_speed_kbps=nb_circuit_termination.upstream_speed, + cross_connect_id=nb_circuit_termination.xconnect_id, + patch_panel_or_ports=nb_circuit_termination.pp_info, + description=nb_circuit_termination.description, + tags=_tags, + system_of_record=_sor, + uuid=nb_circuit_termination.id, + ) + + if not check_sor_field(nb_circuit_termination): + new_circuit_termination.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_circuit_termination) + try: + _circuit = self.get( + self.circuit, + { + "circuit_id": nb_circuit_termination.circuit.cid, + "provider": nb_circuit_termination.circuit.provider.name, + }, + ) + _circuit.add_child(new_circuit_termination) + except ObjectAlreadyExists as err: + self.job.logger.warning(f"CircuitTermination for {_circuit} already exists. {err}") + + def load_namespace(self): + """Method to load Namespace objects from Nautobot into NautobotNamespace DiffSync models.""" + for nb_namespace in Namespace.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Namespace {nb_namespace}, with ID: {nb_namespace.id}") + try: + self.get(self.namespace, nb_namespace.name) + except ObjectNotFound: + _sor = get_sor_field_nautobot_object(nb_namespace) + try: + _location = Location.objects.get(id=nb_namespace.location_id).name + except Location.DoesNotExist: + _location = "" + new_namespace = self.namespace( + name=nb_namespace.name, + description=nb_namespace.description, + location=_location, + system_of_record=_sor, + uuid=nb_namespace.id, + ) + if not check_sor_field(nb_namespace): + new_namespace.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_namespace) + + def load_rir(self): + """Method to load RiR objects from Nautobot into NautobotRiR DiffSync models.""" + for nb_rir in RIR.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot RiR {nb_rir}, with ID {nb_rir.id}") + try: + self.get(self.rir, nb_rir.name) + except ObjectNotFound: + _sor = get_sor_field_nautobot_object(nb_rir) + new_rir = self.rir( + name=nb_rir.name, + private=nb_rir.is_private, + description=nb_rir.description, + system_of_record=_sor, + uuid=nb_rir.id, + ) + if not check_sor_field(nb_rir): + new_rir.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_rir) + + def load_vlan_group(self): + """Method to load VLANGroup objects from Nautobot into NautobotVLANGroup DiffSync models.""" + for nb_vlan_group in VLANGroup.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot VLANGroup {nb_vlan_group}, with ID {nb_vlan_group.id}") + try: + self.get(self.vlan_group, nb_vlan_group.name) + except ObjectNotFound: + _sor = get_sor_field_nautobot_object(nb_vlan_group) + if nb_vlan_group.location: + _location = nb_vlan_group.location.name + else: + _location = "" + new_vlan_group = self.vlan_group( + name=nb_vlan_group.name, + description=nb_vlan_group.description, + location=_location, + system_of_record=_sor, + 
uuid=nb_vlan_group.id, + ) + if not check_sor_field(nb_vlan_group): + new_vlan_group.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_vlan_group) + + def load_vlan(self): + """Method to load VLAN objects from Nautobot into NautobotVLAN DiffSync models.""" + for nb_vlan in VLAN.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot VLAN {nb_vlan}, with ID {nb_vlan.id}") + try: + self.get( + self.vlan, + { + "name": nb_vlan.name, + "vid": nb_vlan.vid, + "vlan_group": (nb_vlan.vlan_group.name if nb_vlan.vlan_group else ""), + }, + ) + except ObjectNotFound: + _locations = [] + _tags = [] + _sor = get_sor_field_nautobot_object(nb_vlan) + if nb_vlan.locations: + for _location in nb_vlan.locations.values_list("name", flat=True): + _locations.append(_location) + if nb_vlan.tags: + for _tag in nb_vlan.tags.values_list("name", flat=True): + _tags.append(_tag) + new_vlan = self.vlan( + name=nb_vlan.name, + vid=nb_vlan.vid, + vlan_group=nb_vlan.vlan_group.name if nb_vlan.vlan_group else None, + role=nb_vlan.role.name if nb_vlan.role else None, + description=nb_vlan.description, + status=nb_vlan.status.name, + locations=_locations, + tenant=nb_vlan.tenant.name if nb_vlan.tenant else None, + tags=_tags, + system_of_record=_sor, + uuid=nb_vlan.id, + ) + if not check_sor_field(nb_vlan): + new_vlan.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_vlan) + + def load_vrf(self): + """Method to load VRF objects from Nautobot into NautobotVRF DiffSync models.""" + for nb_vrf in VRF.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot VRF {nb_vrf}, with ID {nb_vrf.id}") + try: + self.get( + self.vrf, + {"name": nb_vrf.name, "namespace": {nb_vrf.namespace.name}}, + ) + except ObjectNotFound: + _tags = [] + _sor = get_sor_field_nautobot_object(nb_vrf) + if nb_vrf.tags: + for _tag in nb_vrf.tags.values_list("name", flat=True): + _tags.append(_tag) + new_vrf = self.vrf( + name=nb_vrf.name, + namespace=Namespace.objects.get(id=nb_vrf.namespace_id).name, + route_distinguisher=nb_vrf.rd, + description=nb_vrf.description, + tenant=(Tenant.objects.get(id=nb_vrf.tenant_id).name if nb_vrf.tenant_id else None), + tags=_tags, + system_of_record=_sor, + uuid=nb_vrf.id, + ) + if not check_sor_field(nb_vrf): + new_vrf.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_vrf) + + def load_prefix(self): + """Method to load Prefix objects from Nautobot into NautobotPrefix DiffSync models.""" + for nb_prefix in Prefix.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Prefix {nb_prefix}, with ID {nb_prefix.id}") + try: + self.get( + self.prefix, + { + "network": nb_prefix.network, + "namespace": nb_prefix.namespace.name, + }, + ) + except ObjectNotFound: + _tags = [] + _vlan = None + _sor = get_sor_field_nautobot_object(nb_prefix) + if nb_prefix.tags: + for _tag in nb_prefix.tags.values_list("name", flat=True): + _tags.append(_tag) + if nb_prefix.vlan: + if nb_prefix.vlan.vlan_group: + _group = nb_prefix.vlan.vlan_group.name + else: + _group = "None" + _vlan = f"{nb_prefix.vlan.name}__{nb_prefix.vlan.vid}__{_group}" + _vrfs = get_vrf_prefix_assignments(prefix=nb_prefix) + _locations = get_prefix_location_assignments(prefix=nb_prefix) + new_prefix = self.prefix( + network=f"{nb_prefix.network}/{nb_prefix.prefix_length}", + namespace=Namespace.objects.get(id=nb_prefix.namespace_id).name, + prefix_type=nb_prefix.type, + status=Status.objects.get(id=nb_prefix.status_id).name, + role=nb_prefix.role.name if 
nb_prefix.role else None, + rir=(RIR.objects.get(id=nb_prefix.rir_id).name if nb_prefix.rir_id else None), + date_allocated=( + nb_prefix.date_allocated.replace(tzinfo=None) if nb_prefix.date_allocated else None + ), + description=nb_prefix.description, + vrfs=_vrfs, + locations=_locations, + vlan=_vlan, + tenant=(Tenant.objects.get(id=nb_prefix.tenant_id).name if nb_prefix.tenant_id else None), + tags=_tags, + system_of_record=_sor, + uuid=nb_prefix.id, + ) + if not check_sor_field(nb_prefix): + new_prefix.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_prefix) + + def load_secret(self): + """Method to load Secrets objects from Nautobot into NautobotSecrets DiffSync models.""" + for nb_secret in Secret.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Secret: {nb_secret}, with ID: {nb_secret.id}") + try: + self.get(self.secret, nb_secret.name) + except ObjectNotFound: + _sor = "" + if "system_of_record" in nb_secret.custom_field_data: + _sor = ( + nb_secret.custom_field_data["system_of_record"] + if nb_secret.custom_field_data["system_of_record"] is not None + else "" + ) + new_secret = self.secret( + name=nb_secret.name, + provider=nb_secret.provider, + parameters=nb_secret.parameters, + system_of_record=_sor, + uuid=nb_secret.id, + ) + self.job.logger.info(f"Loading Nautobot secret - {nb_secret.name}") + + if not check_sor_field(nb_secret): + new_secret.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_secret) + + def load_secrets_group(self): + """Method to load SecretsGroup objects from Nautobot into NautobotSecretsGroup DiffSync models.""" + _secrets = [] + for nb_sg in SecretsGroup.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot SecretsGroup: {nb_sg}") + for nb_secret in nb_sg.secrets_group_associations.all(): + _secrets.append( + { + "name": nb_secret.secret.name, + "secret_type": nb_secret.secret_type, + "access_type": nb_secret.access_type, + } + ) + _secrets = sorted(_secrets, key=lambda x: x["name"]) + try: + self.get(self.secrets_group, nb_sg.name) + except ObjectNotFound: + _sor = "" + if "system_of_record" in nb_sg.custom_field_data: + _sor = ( + nb_sg.custom_field_data["system_of_record"] + if nb_sg.custom_field_data["system_of_record"] is not None + else "" + ) + new_sg = self.secrets_group( + name=nb_sg.name, + secrets=_secrets, + system_of_record=_sor, + uuid=nb_sg.id, + ) + self.job.logger.info(f"Loading Nautobot secret - {nb_sg.name}") + + if not check_sor_field(nb_sg): + new_sg.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_sg) + _secrets.clear() + + def load_git_repository(self): + """Method to load GitRepository objects from Nautobot into NautobotGitRepository DiffSync models.""" + for nb_gr in GitRepository.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot GitRepository: {nb_gr}") + try: + self.get(self.git_repository, nb_gr.name) + except ObjectNotFound: + try: + _secrets_group = nb_gr.secrets_group.name + except AttributeError: + _secrets_group = None + _sor = "" + if "system_of_record" in nb_gr.custom_field_data: + _sor = ( + nb_gr.custom_field_data["system_of_record"] + if nb_gr.custom_field_data["system_of_record"] is not None + else "" + ) + new_gr = self.git_repository( + name=nb_gr.name, + url=nb_gr.remote_url, + branch=nb_gr.branch, + secrets_group=_secrets_group, + provided_contents=nb_gr.provided_contents, + system_of_record=_sor, + uuid=nb_gr.id, + ) + + if not check_sor_field(nb_gr): + new_gr.model_flags = 
DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_gr) + + def load_dynamic_group(self): + """Method to load DynamicGroup objects from Nautobot into NautobotDynamicGroup DiffSync models.""" + for nb_dyn_group in DynamicGroup.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot DynamicGroup {nb_dyn_group}") + try: + self.get(self.dynamic_group, nb_dyn_group.name) + except ObjectNotFound: + _content_type = lookup_content_type_model_path( + nb_model="dynamic_groups", content_id=nb_dyn_group.content_type.id + ) + if _content_type is None: + self.job.logger.warning( + f"Could not find ContentType for {nb_dyn_group.name} with ContentType ID {nb_dyn_group.content_type.id}" + ) + _sor = "" + if "system_of_record" in nb_dyn_group.custom_field_data: + _sor = ( + nb_dyn_group.custom_field_data["system_of_record"] + if nb_dyn_group.custom_field_data["system_of_record"] is not None + else "" + ) + new_dyn_group = self.dynamic_group( + name=nb_dyn_group.name, + content_type=_content_type, + dynamic_filter=nb_dyn_group.filter, + description=nb_dyn_group.description, + system_of_record=_sor, + uuid=nb_dyn_group.id, + ) + + if not check_sor_field(nb_dyn_group): + new_dyn_group.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_dyn_group) + + def load_computed_field(self): + """Method to load ComputedField objects from Nautobot into NautobotComputedField DiffSync models.""" + for nb_comp_field in ComputedField.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot ComputedField {nb_comp_field}") + try: + self.get(self.computed_field, nb_comp_field.label) + except ObjectNotFound: + _content_type = lookup_content_type_model_path( + nb_model="custom_fields", content_id=nb_comp_field.content_type.id + ) + if _content_type is None: + self.job.logger.warning( + f"Could not find ContentType for {nb_comp_field.label} with ContentType {nb_comp_field.content_type}, and ContentType ID {nb_comp_field.content_type.id}" + ) + new_computed_field = self.computed_field( + label=nb_comp_field.label, + content_type=_content_type, + template=nb_comp_field.template, + ) + new_computed_field.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_computed_field) + + def load_tag(self): + """Method to load Tag objects from Nautobot into NautobotTag DiffSync Models.""" + for nb_tag in Tag.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot Tag {nb_tag}") + try: + self.get(self.tag, nb_tag.name) + except ObjectNotFound: + _content_types = [] + _content_uuids = nb_tag.content_types.values_list("model", "id") + for _uuid in _content_uuids: + _content_types.append(lookup_model_for_taggable_class_id(_uuid[1])) + _content_types.sort() + _sor = "" + if "system_of_record" in nb_tag.custom_field_data: + _sor = ( + nb_tag.custom_field_data["system_of_record"] + if nb_tag.custom_field_data["system_of_record"] is not None + else "" + ) + new_tag = self.tag( + name=nb_tag.name, + color=nb_tag.color, + content_types=_content_types, + description=nb_tag.description, + system_of_record=_sor, + uuid=nb_tag.id, + ) + + if not check_sor_field(nb_tag): + new_tag.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_tag) + _content_types.clear() + + def load_graph_ql_query(self): + """Method to load GraphQLQuery objects from Nautobot into NautobotGraphQLQuery Models.""" + for query in GraphQLQuery.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot GraphQLQuery {query}") + try: + 
self.get(self.graph_ql_query, query.name) + except ObjectNotFound: + new_query = self.graph_ql_query(name=query.name, query=query.query) + new_query.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_query) + + def load_software(self): + """Method to load Software objects from Nautobot into NautobotSoftware Models.""" + for nb_software in ORMSoftware.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot SoftwareLCM {nb_software}") + try: + self.get( + self.software, + { + "version": nb_software.version, + "platform": nb_software.device_platform.name, + }, + ) + except ObjectNotFound: + _tags = list( + ORMSoftware.objects.get( + version=nb_software.version, + device_platform=nb_software.device_platform.id, + ) + .tags.all() + .values_list("name", flat=True) + ) + _sor = "" + if "system_of_record" in nb_software.custom_field_data: + _sor = ( + nb_software.custom_field_data["system_of_record"] + if nb_software.custom_field_data["system_of_record"] is not None + else "" + ) + new_software = self.software( + version=nb_software.version, + platform=nb_software.device_platform.name, + alias=nb_software.alias, + release_date=nb_software.release_date, + eos_date=nb_software.end_of_support, + documentation_url=nb_software.documentation_url, + long_term_support=nb_software.long_term_support, + pre_release=nb_software.pre_release, + tags=_tags, + system_of_record=_sor, + uuid=nb_software.id, + ) + + if not check_sor_field(nb_software): + new_software.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_software) + + def load_software_image(self): + """Method to load SoftwareImage objects from Nautobot into NautobotSoftwareImage Models.""" + for nb_software_image in ORMSoftwareImage.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot SoftwareImageLCM {nb_software_image}") + try: + self.get(self.software_image, nb_software_image.image_file_name) + except ObjectNotFound: + _tags = list( + ORMSoftwareImage.objects.get(image_file_name=nb_software_image.image_file_name) + .tags.all() + .values_list("name", flat=True) + ) + _sor = "" + if "system_of_record" in nb_software_image.custom_field_data: + _sor = ( + nb_software_image.custom_field_data["system_of_record"] + if nb_software_image.custom_field_data["system_of_record"] is not None + else "" + ) + new_software_image = self.software_image( + file_name=nb_software_image.image_file_name, + software=f"{nb_software_image.software.device_platform} - {nb_software_image.software.version}", + platform=nb_software_image.software.device_platform.name, + software_version=nb_software_image.software.version, + download_url=nb_software_image.download_url, + image_file_checksum=nb_software_image.image_file_checksum, + hashing_algorithm=nb_software_image.hashing_algorithm, + default_image=nb_software_image.default_image, + tags=_tags, + system_of_record=_sor, + uuid=nb_software_image.id, + ) + + if not check_sor_field(nb_software_image): + new_software_image.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_software_image) + + def load_validated_software(self): + """Method to load ValidatedSoftware objects from Nautobot into NautobotValidatedSoftware Models.""" + for nb_validated_software in ORMValidatedSoftware.objects.all(): + if self.job.debug: + self.job.logger.debug(f"Loading Nautobot ValidatedSoftwareLCM {nb_validated_software}") + try: + _software = ORMSoftware.objects.get( + version=nb_validated_software.software.version, + 
device_platform=nb_validated_software.software.device_platform.id, + ) + self.get( + self.validated_software, + { + "software": f"{nb_validated_software.software.device_platform} - {nb_validated_software.software.version}", + "valid_since": nb_validated_software.start, + "valid_until": nb_validated_software.end, + }, + ) + except ObjectNotFound: + _val_software = ORMValidatedSoftware.objects.get( + software=_software, + start=nb_validated_software.start, + end=nb_validated_software.end, + ) + _tags = sorted(list(_val_software.tags.all().values_list("name", flat=True))) + _devices = sorted(list(_val_software.devices.all().values_list("name", flat=True))) + _device_types = sorted(list(_val_software.device_types.all().values_list("model", flat=True))) + _device_roles = sorted(list(_val_software.device_roles.all().values_list("name", flat=True))) + _inventory_items = sorted(list(_val_software.inventory_items.all().values_list("name", flat=True))) + _object_tags = sorted(list(_val_software.object_tags.all().values_list("name", flat=True))) + _sor = "" + if "system_of_record" in nb_validated_software.custom_field_data: + _sor = ( + nb_validated_software.custom_field_data["system_of_record"] + if nb_validated_software.custom_field_data["system_of_record"] is not None + else "" + ) + new_validated_software = self.validated_software( + software=f"{nb_validated_software.software.device_platform} - {nb_validated_software.software.version}", + software_version=nb_validated_software.software.version, + platform=nb_validated_software.software.device_platform.name, + valid_since=nb_validated_software.start, + valid_until=nb_validated_software.end, + preferred_version=nb_validated_software.preferred, + devices=_devices, + device_types=_device_types, + device_roles=_device_roles, + inventory_items=_inventory_items, + object_tags=_object_tags, + tags=_tags, + system_of_record=_sor, + uuid=nb_validated_software.id, + ) + + if not check_sor_field(nb_validated_software): + new_validated_software.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + + self.add(new_validated_software)
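Each loader above is opt-in: the `load()` method that follows consults a per-model boolean under `PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]`. A minimal `nautobot_config.py` sketch, assuming the key names checked in `load()` below (the `enable_bootstrap` key is inferred from the `NAUTOBOT_SSOT_ENABLE_BOOTSTRAP` environment variable and the values shown are illustrative, not recommendations):

```python
# nautobot_config.py (illustrative sketch) -- gate which models the
# Bootstrap integration loads and syncs. The keys mirror the flags
# checked in load() below.
PLUGINS_CONFIG = {
    "nautobot_ssot": {
        "enable_bootstrap": True,  # assumed flag name, see creds.example.env
        "bootstrap_models_to_sync": {
            "tenant_group": True,
            "tenant": True,
            "secret": False,  # leave Secrets unmanaged
            "git_repository": True,
            # ...one boolean per model named in load()...
        },
    },
}
```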
+ + def load(self): + """Load data from Nautobot into DiffSync models.""" + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tenant_group"]: + self.load_tenant_group() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tenant"]: + self.load_tenant() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["role"]: + self.load_role() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["manufacturer"]: + self.load_manufacturer() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["platform"]: + self.load_platform() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["location_type"]: + self.load_location_type() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["location"]: + self.load_location() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["team"]: + self.load_team() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["contact"]: + self.load_contact() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["provider"]: + self.load_provider() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["provider_network"]: + self.load_provider_network() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit_type"]: + self.load_circuit_type() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit"]: + self.load_circuit() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["circuit_termination"]: + self.load_circuit_termination() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["namespace"]: + self.load_namespace() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["rir"]: + self.load_rir() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vlan_group"]: + self.load_vlan_group() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vlan"]: + self.load_vlan() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["vrf"]: + self.load_vrf() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["prefix"]: + self.load_prefix() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["secret"]: + self.load_secret() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["secrets_group"]: + self.load_secrets_group() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["git_repository"]: + self.load_git_repository() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["dynamic_group"]: + self.load_dynamic_group() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["computed_field"]: + self.load_computed_field() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["tag"]: + self.load_tag() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["graph_ql_query"]: + self.load_graph_ql_query() + if LIFECYCLE_MGMT: + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["software"]: + self.load_software() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["software_image"]: + self.load_software_image() + if settings.PLUGINS_CONFIG["nautobot_ssot"]["bootstrap_models_to_sync"]["validated_software"]: + self.load_validated_software() diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/models/__init__.py b/nautobot_ssot/integrations/bootstrap/diffsync/models/__init__.py new file mode 100644 index 000000000..f5d2b91b3 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/models/__init__.py @@ -0,0 +1 @@ +"""DiffSync models and adapters for the bootstrap SSoT plugin.""" diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/models/base.py b/nautobot_ssot/integrations/bootstrap/diffsync/models/base.py new file mode 100755 index 000000000..f702008c3 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/models/base.py @@ -0,0 +1,764 @@ +"""DiffSyncModel subclasses for Nautobot-to-bootstrap data sync.""" + +import datetime +from typing import List, Optional +from uuid import UUID + +from diffsync import DiffSyncModel + + +class Secret(DiffSyncModel): + """DiffSync model for Bootstrap Secrets.""" + + _modelname = "secret" + _identifiers = ("name",) + _attributes = ("provider", "parameters", "system_of_record") + _children = {} + + name: str + provider: str + parameters: dict + system_of_record: str + + uuid: Optional[UUID] = None + + +class SecretsGroup(DiffSyncModel): + """DiffSync model for Bootstrap SecretsGroups.""" + + _modelname = "secrets_group" + _identifiers = ("name",) + _attributes = ("secrets", "system_of_record") + _children = {} + + name: str + secrets: List["dict"] = [] + system_of_record: str + + uuid: Optional[UUID] = None + + +class GitRepository(DiffSyncModel): + """DiffSync model for Bootstrap GitRepositories.""" + + _modelname =
"git_repository" + _identifiers = ("name",) + _attributes = ( + "url", + "branch", + "secrets_group", + "provided_contents", + "system_of_record", + ) + _children = {} + + name: str + url: str + branch: str + secrets_group: Optional[str] = None + provided_contents: List[str] = [] + system_of_record: str + + uuid: Optional[UUID] = None + + +class DynamicGroup(DiffSyncModel): + """DiffSync model for Bootstrap DynamicGroups.""" + + _modelname = "dynamic_group" + _identifiers = ("name", "content_type") + _attributes = ("dynamic_filter", "description", "system_of_record") + _children = {} + + name: str + content_type: str + dynamic_filter: dict + description: str + system_of_record: str + + uuid: Optional[UUID] = None + + +class ComputedField(DiffSyncModel): + """DiffSync model for Bootstrap ComputedFields.""" + + _modelname = "computed_field" + _identifiers = ("label",) + _attributes = ( + "content_type", + "template", + ) + _children = {} + + label: str + content_type: str + template: str + + uuid: Optional[UUID] = None + + +class Tag(DiffSyncModel): + """DiffSync model for Bootstrap Tags.""" + + _modelname = "tag" + _identifiers = ("name",) + _attributes = ("color", "content_types", "description", "system_of_record") + _children = {} + + name: str + color: str + content_types: List[str] = [] + description: str + system_of_record: str + + uuid: Optional[UUID] = None + + +class GraphQLQuery(DiffSyncModel): + """DiffSync Model for Bootstrap GraphQLQueries.""" + + _modelname = "graph_ql_query" + _identifiers = ("name",) + _attributes = ("query",) + _children = {} + + name: str + query: str + + uuid: Optional[UUID] = None + + +class Software(DiffSyncModel): + """DiffSync Model for Bootstrap Software.""" + + _modelname = "software" + _identifiers = ( + "version", + "platform", + ) + _attributes = ( + "alias", + "release_date", + "eos_date", + "long_term_support", + "pre_release", + "documentation_url", + "tags", + "system_of_record", + ) + _children = {} + + version: str + platform: str + alias: Optional[str] = None + release_date: Optional[datetime.date] = None + eos_date: Optional[datetime.date] = None + documentation_url: Optional[str] = None + long_term_support: bool + pre_release: bool + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class SoftwareImage(DiffSyncModel): + """DiffSync Model for Bootstrap SoftwareImage.""" + + _modelname = "software_image" + _identifiers = ("software",) + _attributes = ( + "platform", + "software_version", + "file_name", + "download_url", + "image_file_checksum", + "hashing_algorithm", + "default_image", + "tags", + "system_of_record", + ) + _children = {} + + software: str + platform: str + software_version: str + file_name: str + download_url: Optional[str] = None + image_file_checksum: Optional[str] = None + hashing_algorithm: str + default_image: bool + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class ValidatedSoftware(DiffSyncModel): + """DiffSync Model for Bootstrap ValidatedSoftware.""" + + _modelname = "validated_software" + _identifiers = ("software", "valid_since", "valid_until") + _attributes = ( + "devices", + "device_types", + "device_roles", + "inventory_items", + "object_tags", + "preferred_version", + "tags", + "platform", + "software_version", + "system_of_record", + ) + _children = {} + + devices: Optional[List[str]] = None + device_types: Optional[List[str]] = None + device_roles: Optional[List[str]] = None + inventory_items: Optional[List[str]] = 
None + object_tags: Optional[List[str]] = None + software: str + platform: str + software_version: str + valid_since: Optional[datetime.date] = None + valid_until: Optional[datetime.date] = None + preferred_version: bool + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class TenantGroup(DiffSyncModel): + """DiffSync Model for Bootstrap TenantGroup.""" + + _modelname = "tenant_group" + _identifiers = ("name", "parent") + _attributes = ( + "description", + "system_of_record", + ) + _children = {} + + name: str + parent: Optional[str] = None + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Tenant(DiffSyncModel): + """DiffSync Model for Bootstrap Tenant.""" + + _modelname = "tenant" + _identifiers = ("name",) + _attributes = ("description", "tenant_group", "tags", "system_of_record") + _children = {} + + name: str + tenant_group: Optional[str] = None + description: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Role(DiffSyncModel): + """DiffSync Model for Bootstrap Role.""" + + _modelname = "role" + _identifiers = ("name",) + _attributes = ( + "weight", + "description", + "color", + "content_types", + "system_of_record", + ) + _children = {} + + name: str + weight: Optional[int] = None + description: Optional[str] = None + color: Optional[str] = None + content_types: List[str] = [] + system_of_record: str + + uuid: Optional[UUID] = None + + +class Team(DiffSyncModel): + """DiffSync Model for Bootstrap Team.""" + + _modelname = "team" + _identifiers = ("name",) + _attributes = ("phone", "email", "address", "contacts", "system_of_record") + _children = {} + + name: str + phone: Optional[str] = None + email: Optional[str] = None + address: Optional[str] = None + contacts: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Contact(DiffSyncModel): + """DiffSync Model for Bootstrap Contact.""" + + _modelname = "contact" + _identifiers = ("name",) + _attributes = ("phone", "email", "address", "teams", "system_of_record") + _children = {} + + name: str + phone: Optional[str] = None + email: Optional[str] = None + address: Optional[str] = None + teams: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Manufacturer(DiffSyncModel): + """DiffSync Model for Bootstrap Manufacturer.""" + + _modelname = "manufacturer" + _identifiers = ("name",) + _attributes = ( + "description", + "system_of_record", + ) + _children = {} + + name: str + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Platform(DiffSyncModel): + """DiffSync Model for Bootstrap Platform.""" + + _modelname = "platform" + _identifiers = ( + "name", + "manufacturer", + ) + _attributes = ( + "network_driver", + "napalm_driver", + "napalm_arguments", + "description", + "system_of_record", + ) + _children = {} + + name: str + manufacturer: str + network_driver: Optional[str] = None + napalm_driver: Optional[str] = None + napalm_arguments: Optional[dict] = None + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class LocationType(DiffSyncModel): + """DiffSync Model for Bootstrap LocationType.""" + + _modelname = "location_type" + _identifiers = ("name",) + _attributes = ( + "parent", + "nestable", + "description", + "content_types", + "system_of_record", + ) + _children = {} + + name: str + 
parent: Optional[str] = None + nestable: Optional[bool] = None + description: Optional[str] = None + content_types: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Location(DiffSyncModel): + """DiffSync Model for Bootstrap Location.""" + + _modelname = "location" + _identifiers = ( + "name", + "location_type", + ) + _attributes = ( + "parent", + "status", + "facility", + "asn", + "time_zone", + "description", + "tenant", + "physical_address", + "shipping_address", + "latitude", + "longitude", + "contact_name", + "contact_phone", + "contact_email", + "tags", + "system_of_record", + ) + _children = {} + + name: str + location_type: str + parent: Optional[str] = None + status: Optional[str] = None + facility: Optional[str] = None + asn: Optional[int] = None + time_zone: Optional[str] = None + description: Optional[str] = None + tenant: Optional[str] = None + physical_address: Optional[str] = None + shipping_address: Optional[str] = None + latitude: Optional[float] = None + longitude: Optional[float] = None + contact_name: Optional[str] = None + contact_phone: Optional[str] = None + contact_email: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Provider(DiffSyncModel): + """DiffSync model for Bootstrap Provider.""" + + _modelname = "provider" + _identifiers = ("name",) + _attributes = ( + "asn", + "account_number", + "portal_url", + "noc_contact", + "admin_contact", + "tags", + "system_of_record", + ) + _children = {} + + name: str + asn: Optional[int] = None + account_number: Optional[str] = None + portal_url: Optional[str] = None + noc_contact: Optional[str] = None + admin_contact: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class ProviderNetwork(DiffSyncModel): + """DiffSync model for Bootstrap ProviderNetwork.""" + + _modelname = "provider_network" + _identifiers = ( + "name", + "provider", + ) + _attributes = ("description", "comments", "tags", "system_of_record") + _children = {} + + name: str + provider: str + description: Optional[str] = None + comments: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class CircuitType(DiffSyncModel): + """DiffSync model for Bootstrap CircuitType.""" + + _modelname = "circuit_type" + _identifiers = ("name",) + _attributes = ("description", "system_of_record") + _children = {} + + name: str + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Circuit(DiffSyncModel): + """DiffSync model for Bootstrap Circuit.""" + + _modelname = "circuit" + _identifiers = ( + "circuit_id", + "provider", + ) + _attributes = ( + "circuit_type", + "status", + "date_installed", + "commit_rate_kbps", + "description", + "tenant", + "tags", + "system_of_record", + ) + _children = {"circuit_termination": "terminations"} + + circuit_id: str + provider: str + circuit_type: str + status: str + date_installed: Optional[datetime.date] = None + commit_rate_kbps: Optional[int] = None + description: Optional[str] = None + tenant: Optional[str] = None + tags: Optional[List[str]] = None + terminations: Optional[List["Circuit"]] = [] + system_of_record: Optional[str] = None + + uuid: Optional[UUID] = None + + +class CircuitTermination(DiffSyncModel): + """DiffSync model for Bootstrap CircuitTermination.""" + + _modelname = "circuit_termination" + _identifiers = ( + "name", + 
"termination_side", + "circuit_id", + ) + _attributes = ( + "termination_type", + "location", + "provider_network", + "port_speed_kbps", + "upstream_speed_kbps", + "cross_connect_id", + "patch_panel_or_ports", + "description", + "tags", + "system_of_record", + ) + _children = {} + + name: str + termination_type: str + termination_side: str + circuit_id: str + location: Optional[str] = None + provider_network: Optional[str] = None + port_speed_kbps: Optional[int] = None + upstream_speed_kbps: Optional[int] = None + cross_connect_id: Optional[str] = None + patch_panel_or_ports: Optional[str] = None + description: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Namespace(DiffSyncModel): + """DiffSync model for Bootstrap Namespace.""" + + _modelname = "namespace" + _identifiers = ("name",) + _attributes = ("description", "location", "system_of_record") + _children = {} + + name: str + description: Optional[str] = None + location: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class RiR(DiffSyncModel): + """DiffSync model for Bootstrap RiR.""" + + _modelname = "rir" + _identifiers = [ + "name", + ] + _attributes = [ + "private", + "description", + "system_of_record", + ] + _children = {} + + name: str + private: bool + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class VLANGroup(DiffSyncModel): + """DiffSync model for Bootstrap VLANGroup.""" + + _modelname = "vlan_group" + _identifiers = ("name",) + _attributes = ("location", "description", "system_of_record") + _children = {} + + name: str + location: Optional[str] = None + description: Optional[str] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class VLAN(DiffSyncModel): + """DiffSync model for Bootstrap VLAN.""" + + _modelname = "vlan" + _identifiers = ( + "name", + "vid", + "vlan_group", + ) + _attributes = ( + "description", + "status", + "role", + "locations", + "tenant", + "tags", + "system_of_record", + ) + _children = {} + + name: str + vid: int + vlan_group: Optional[str] = None + description: Optional[str] = None + status: Optional[str] = None + role: Optional[str] = None + locations: Optional[List[str]] = None + tenant: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class VRF(DiffSyncModel): + """DiffSync model for Bootstrap VRF.""" + + _modelname = "vrf" + _identifiers = ( + "name", + "namespace", + ) + _attributes = ( + "route_distinguisher", + "description", + "tenant", + "tags", + "system_of_record", + ) + _children = {} + + name: str + namespace: Optional[str] = None + route_distinguisher: Optional[str] = None + description: Optional[str] = None + tenant: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class Prefix(DiffSyncModel): + """DiffSync model for Bootstrap Prefix.""" + + _modelname = "prefix" + _identifiers = ( + "network", + "namespace", + ) + _attributes = ( + "prefix_type", + "status", + "role", + "rir", + "date_allocated", + "description", + "vrfs", + "locations", + "vlan", + "tenant", + "tags", + "system_of_record", + ) + _children = {} + + network: str + namespace: str + prefix_type: Optional[str] = None + status: Optional[str] = None + role: Optional[str] = None + rir: Optional[str] = None + date_allocated: Optional[datetime.datetime] = None + description: Optional[str] = None + vrfs: 
Optional[List[str]] = None + locations: Optional[List[str]] = None + vlan: Optional[str] = None + tenant: Optional[str] = None + tags: Optional[List[str]] = None + system_of_record: str + + uuid: Optional[UUID] = None + + +class SSoTJob(DiffSyncModel): + """DiffSync model for Bootstrap SSoTJobs.""" + + _modelname = "ssot-job" + _identifiers = ( + "name", + "schedule", + ) + _attributes = () + _children = {} + + name: str + schedule: str + + uuid: Optional[UUID] = None + + +Circuit.model_rebuild() +CircuitTermination.model_rebuild() diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/models/bootstrap.py b/nautobot_ssot/integrations/bootstrap/diffsync/models/bootstrap.py new file mode 100755 index 000000000..86729e066 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/models/bootstrap.py @@ -0,0 +1,549 @@ +"""Nautobot Ssot Bootstrap DiffSync models for Nautobot Ssot Bootstrap SSoT.""" + +from nautobot_ssot.integrations.bootstrap.diffsync.models.base import ( + VLAN, + VRF, + Circuit, + CircuitTermination, + CircuitType, + ComputedField, + Contact, + DynamicGroup, + GitRepository, + GraphQLQuery, + Location, + LocationType, + Manufacturer, + Namespace, + Platform, + Prefix, + Provider, + ProviderNetwork, + RiR, + Role, + Secret, + SecretsGroup, + Tag, + Team, + Tenant, + TenantGroup, + VLANGroup, +) + +try: + import nautobot_device_lifecycle_mgmt # noqa: F401 + + LIFECYCLE_MGMT = True +except ImportError: + LIFECYCLE_MGMT = False + +if LIFECYCLE_MGMT: + from nautobot_ssot.integrations.bootstrap.diffsync.models.base import ( + Software, + SoftwareImage, + ValidatedSoftware, + ) + + +class BootstrapTenantGroup(TenantGroup): + """Bootstrap implementation of TenantGroup DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create TenantGroup in Bootstrap from BootstrapTenantGroup object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update TenantGroup in Bootstrap from BootstrapTenantGroup object.""" + return super().update(attrs) + + def delete(self): + """Delete TenantGroup in Bootstrap from BootstrapTenantGroup object.""" + return self + + +class BootstrapTenant(Tenant): + """Bootstrap implementation of TenantGroup DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Tenant in Bootstrap from BootstrapTenant object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Tenant in Bootstrap from BootstrapTenant object.""" + return super().update(attrs) + + def delete(self): + """Delete Tenant in Bootstrap from BootstrapTenant object.""" + return self + + +class BootstrapRole(Role): + """Bootstrap implementation of Role DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Role in Bootstrap from BootstrapRole object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Role in Bootstrap from BootstrapRole object.""" + return super().update(attrs) + + def delete(self): + """Delete Role in Bootstrap from BootstrapRole object.""" + return self + + +class BootstrapManufacturer(Manufacturer): + """Bootstrap implementation of Manufacturer DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Manufacturer in Bootstrap from BootstrapManufacturer object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Manufacturer in Bootstrap 
from BootstrapManufacturer object.""" + return super().update(attrs) + + def delete(self): + """Delete Manufacturer in Bootstrap from BootstrapManufacturer object.""" + return self + + +class BootstrapPlatform(Platform): + """Bootstrap implementation of Platform DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Platform in Bootstrap from BootstrapPlatform object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Platform in Bootstrap from BootstrapPlatform object.""" + return super().update(attrs) + + def delete(self): + """Delete Platform in Bootstrap from BootstrapPlatform object.""" + return self + + +class BootstrapLocationType(LocationType): + """Bootstrap implementation of LocationType DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create LocationType in Bootstrap from BootstrapLocationType object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update LocationType in Bootstrap from BootstrapLocationType object.""" + return super().update(attrs) + + def delete(self): + """Delete LocationType in Bootstrap from BootstrapLocationType object.""" + return self + + +class BootstrapLocation(Location): + """Bootstrap implementation of Location DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Location in Bootstrap from BootstrapLocation object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Location in Bootstrap from BootstrapLocation object.""" + return super().update(attrs) + + def delete(self): + """Delete Location in Bootstrap from BootstrapLocation object.""" + return self + + +class BootstrapTeam(Team): + """Bootstrap implementation of Team DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Team in Bootstrap from BootstrapTeam object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Team in Bootstrap from BootstrapTeam object.""" + return super().update(attrs) + + def delete(self): + """Delete Team in Bootstrap from BootstrapTeam object.""" + return self + + +class BootstrapContact(Contact): + """Bootstrap implementation of Contact DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Contact in Bootstrap from BootstrapContact object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Contact in Bootstrap from BootstrapContact object.""" + return super().update(attrs) + + def delete(self): + """Delete Contact in Bootstrap from BootstrapContact object.""" + return self + + +class BootstrapProvider(Provider): + """Bootstrap implementation of Provider DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Provider in Bootstrap from BootstrapProvider object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Provider in Bootstrap from BootstrapProvider object.""" + return super().update(attrs) + + def delete(self): + """Delete Provider in Bootstrap from BootstrapProvider object.""" + return self + + +class BootstrapProviderNetwork(ProviderNetwork): + """Bootstrap implementation of ProviderNetwork DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create ProviderNetwork in Bootstrap from 
BootstrapProviderNetwork object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update ProviderNetwork in Bootstrap from BootstrapProviderNetwork object.""" + return super().update(attrs) + + def delete(self): + """Delete ProviderNetwork in Bootstrap from BootstrapProviderNetwork object.""" + return self + + class BootstrapCircuitType(CircuitType): + """Bootstrap implementation of CircuitType DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create CircuitType in Bootstrap from BootstrapCircuitType object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update CircuitType in Bootstrap from BootstrapCircuitType object.""" + return super().update(attrs) + + def delete(self): + """Delete CircuitType in Bootstrap from BootstrapCircuitType object.""" + return self + + class BootstrapCircuit(Circuit): + """Bootstrap implementation of Circuit DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Circuit in Bootstrap from BootstrapCircuit object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Circuit in Bootstrap from BootstrapCircuit object.""" + return super().update(attrs) + + def delete(self): + """Delete Circuit in Bootstrap from BootstrapCircuit object.""" + return self + + class BootstrapCircuitTermination(CircuitTermination): + """Bootstrap implementation of CircuitTermination DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create CircuitTermination in Bootstrap from BootstrapCircuitTermination object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update CircuitTermination in Bootstrap from BootstrapCircuitTermination object.""" + return super().update(attrs) + + def delete(self): + """Delete CircuitTermination in Bootstrap from BootstrapCircuitTermination object.""" + return self + + class BootstrapSecret(Secret): + """Bootstrap implementation of Secret DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Secret in Bootstrap from BootstrapSecret object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Secret in Bootstrap from BootstrapSecret object.""" + return super().update(attrs) + + def delete(self): + """Delete Secret in Bootstrap from BootstrapSecret object.""" + return self + + class BootstrapSecretsGroup(SecretsGroup): + """Bootstrap implementation of SecretsGroup DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create SecretsGroup in Bootstrap from BootstrapSecretsGroup object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update SecretsGroup in Bootstrap from BootstrapSecretsGroup object.""" + return super().update(attrs) + + def delete(self): + """Delete SecretsGroup in Bootstrap from BootstrapSecretsGroup object.""" + return self
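These Bootstrap-side models are deliberate pass-throughs: the YAML/Git data is the system of record, so diffs are never written back to it. For orientation, a minimal sketch of how the diffsync library exercises these `create()`/`update()`/`delete()` hooks during a job run (the two adapter instances are hypothetical stand-ins for the Bootstrap and Nautobot adapters):

```python
# Sketch of the diffsync flow that drives the hooks defined above.
# `nautobot_adapter` and `bootstrap_adapter` are hypothetical adapter
# instances; diffsync calls the hooks on the *target* side's models.
from diffsync.enum import DiffSyncFlags

diff = nautobot_adapter.diff_from(bootstrap_adapter)  # compute differences
print(diff.summary())  # counts of creates / updates / deletes / no-ops
# Apply Bootstrap -> Nautobot, continuing past individual failures.
nautobot_adapter.sync_from(bootstrap_adapter, flags=DiffSyncFlags.CONTINUE_ON_FAILURE)
```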
+ + class BootstrapGitRepository(GitRepository): + """Bootstrap implementation of GitRepository DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create GitRepository in Bootstrap from BootstrapGitRepository object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update GitRepository in Bootstrap from BootstrapGitRepository object.""" + return super().update(attrs) + + def delete(self): + """Delete GitRepository in Bootstrap from BootstrapGitRepository object.""" + return self + + class BootstrapDynamicGroup(DynamicGroup): + """Bootstrap implementation of DynamicGroup DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create DynamicGroup in Bootstrap from BootstrapDynamicGroup object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update DynamicGroup in Bootstrap from BootstrapDynamicGroup object.""" + return super().update(attrs) + + def delete(self): + """Delete DynamicGroup in Bootstrap from BootstrapDynamicGroup object.""" + return self + + class BootstrapComputedField(ComputedField): + """Bootstrap implementation of ComputedField DiffSync model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create ComputedField in Bootstrap from BootstrapComputedField object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update ComputedField in Bootstrap from BootstrapComputedField object.""" + return super().update(attrs) + + def delete(self): + """Delete ComputedField in Bootstrap from BootstrapComputedField object.""" + return self + + class BootstrapTag(Tag): + """Bootstrap implementation of Bootstrap Tag model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Tag in Bootstrap from BootstrapTag object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Tag in Bootstrap from BootstrapTag object.""" + return super().update(attrs) + + def delete(self): + """Delete Tag in Bootstrap from BootstrapTag object.""" + return self + + class BootstrapGraphQLQuery(GraphQLQuery): + """Bootstrap implementation of Bootstrap GraphQLQuery model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create GraphQLQuery in Bootstrap from BootstrapGraphQLQuery object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update GraphQLQuery in Bootstrap from BootstrapGraphQLQuery object.""" + return super().update(attrs) + + def delete(self): + """Delete GraphQLQuery in Bootstrap from BootstrapGraphQLQuery object.""" + return self + + if LIFECYCLE_MGMT: + + class BootstrapSoftware(Software): + """Bootstrap implementation of Bootstrap Software model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Software in Bootstrap from BootstrapSoftware object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Software in Bootstrap from BootstrapSoftware object.""" + return super().update(attrs) + + def delete(self): + """Delete Software in Bootstrap from BootstrapSoftware object.""" + return self + + class BootstrapSoftwareImage(SoftwareImage): + """Bootstrap implementation of Bootstrap SoftwareImage model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create SoftwareImage in Bootstrap from BootstrapSoftwareImage object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update SoftwareImage in Bootstrap from BootstrapSoftwareImage object.""" + return super().update(attrs) + + def delete(self): + """Delete SoftwareImage in Bootstrap from BootstrapSoftwareImage object.""" + return self + + class BootstrapValidatedSoftware(ValidatedSoftware): + """Bootstrap implementation of Bootstrap ValidatedSoftware model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create
ValidatedSoftware in Bootstrap from BootstrapValidatedSoftware object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update ValidatedSoftware in Bootstrap from BootstrapValidatedSoftware object.""" + return super().update(attrs) + + def delete(self): + """Delete ValidatedSoftware in Bootstrap from BootstrapValidatedSoftware object.""" + return self + + class BootstrapNamespace(Namespace): + """Bootstrap implementation of Bootstrap Namespace model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Namespace in Bootstrap from BootstrapNamespace object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Namespace in Bootstrap from BootstrapNamespace object.""" + return super().update(attrs) + + def delete(self): + """Delete Namespace in Bootstrap from BootstrapNamespace object.""" + return self + + class BootstrapRiR(RiR): + """Bootstrap implementation of Bootstrap RiR model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create RiR in Bootstrap from BootstrapRiR object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update RiR in Bootstrap from BootstrapRiR object.""" + return super().update(attrs) + + def delete(self): + """Delete RiR in Bootstrap from BootstrapRiR object.""" + return self + + class BootstrapVLANGroup(VLANGroup): + """Bootstrap implementation of Bootstrap VLANGroup model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create VLANGroup in Bootstrap from BootstrapVLANGroup object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update VLANGroup in Bootstrap from BootstrapVLANGroup object.""" + return super().update(attrs) + + def delete(self): + """Delete VLANGroup in Bootstrap from BootstrapVLANGroup object.""" + return self + + class BootstrapVLAN(VLAN): + """Bootstrap implementation of Bootstrap VLAN model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create VLAN in Bootstrap from BootstrapVLAN object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update VLAN in Bootstrap from BootstrapVLAN object.""" + return super().update(attrs) + + def delete(self): + """Delete VLAN in Bootstrap from BootstrapVLAN object.""" + return self + + class BootstrapVRF(VRF): + """Bootstrap implementation of Bootstrap VRF model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create VRF in Bootstrap from BootstrapVRF object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update VRF in Bootstrap from BootstrapVRF object.""" + return super().update(attrs) + + def delete(self): + """Delete VRF in Bootstrap from BootstrapVRF object.""" + return self + + class BootstrapPrefix(Prefix): + """Bootstrap implementation of Bootstrap Prefix model.""" + + @classmethod + def create(cls, diffsync, ids, attrs): + """Create Prefix in Bootstrap from BootstrapPrefix object.""" + return super().create(diffsync=diffsync, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Prefix in Bootstrap from BootstrapPrefix object.""" + return super().update(attrs) + + def delete(self): + """Delete Prefix in Bootstrap from BootstrapPrefix object.""" + return self diff --git a/nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py 
b/nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py new file mode 100755 index 000000000..474bfef64 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/diffsync/models/nautobot.py @@ -0,0 +1,2468 @@ +"""Nautobot DiffSync models for bootstrap SSoT.""" + +import os +from datetime import datetime + +import pytz +from django.contrib.contenttypes.models import ContentType +from django.core.exceptions import ValidationError +from django.db.models.deletion import ProtectedError +from nautobot.circuits.models import Circuit as ORMCircuit +from nautobot.circuits.models import CircuitTermination as ORMCircuitTermination +from nautobot.circuits.models import CircuitType as ORMCircuitType +from nautobot.circuits.models import Provider as ORMProvider +from nautobot.circuits.models import ProviderNetwork as ORMProviderNetwork +from nautobot.dcim.models import Device as ORMDevice +from nautobot.dcim.models import DeviceType as ORMDeviceType +from nautobot.dcim.models import InventoryItem as ORMInventoryItem +from nautobot.dcim.models import Location as ORMLocation +from nautobot.dcim.models import LocationType as ORMLocationType +from nautobot.dcim.models import Manufacturer as ORMManufacturer +from nautobot.dcim.models import Platform as ORMPlatform +from nautobot.extras.models import ComputedField as ORMComputedField +from nautobot.extras.models import Contact as ORMContact +from nautobot.extras.models import DynamicGroup as ORMDynamicGroup +from nautobot.extras.models import GitRepository as ORMGitRepository +from nautobot.extras.models import GraphQLQuery as ORMGraphQLQuery +from nautobot.extras.models import Role as ORMRole +from nautobot.extras.models import Secret as ORMSecret +from nautobot.extras.models import SecretsGroup as ORMSecretsGroup +from nautobot.extras.models import SecretsGroupAssociation as ORMSecretsGroupAssociation +from nautobot.extras.models import Status as ORMStatus +from nautobot.extras.models import Tag as ORMTag +from nautobot.extras.models import Team as ORMTeam +from nautobot.ipam.models import RIR as ORMRiR +from nautobot.ipam.models import VLAN as ORMVLAN +from nautobot.ipam.models import VRF as ORMVRF +from nautobot.ipam.models import Namespace as ORMNamespace +from nautobot.ipam.models import Prefix as ORMPrefix +from nautobot.ipam.models import VLANGroup as ORMVLANGroup +from nautobot.tenancy.models import Tenant as ORMTenant +from nautobot.tenancy.models import TenantGroup as ORMTenantGroup + +from nautobot_ssot.integrations.bootstrap.diffsync.models.base import ( + VLAN, + VRF, + Circuit, + CircuitTermination, + CircuitType, + ComputedField, + Contact, + DynamicGroup, + GitRepository, + GraphQLQuery, + Location, + LocationType, + Manufacturer, + Namespace, + Platform, + Prefix, + Provider, + ProviderNetwork, + RiR, + Role, + Secret, + SecretsGroup, + Tag, + Team, + Tenant, + TenantGroup, + VLANGroup, +) +from nautobot_ssot.integrations.bootstrap.utils import ( + # lookup_contact_for_team, + check_sor_field, + lookup_content_type_for_taggable_model_path, + lookup_content_type_id, + lookup_team_for_contact, +) + +try: + import nautobot_device_lifecycle_mgmt # noqa: F401 + + LIFECYCLE_MGMT = True +except ImportError: + LIFECYCLE_MGMT = False + +if LIFECYCLE_MGMT: + # noqa: F401 + from nautobot_device_lifecycle_mgmt.models import ( + SoftwareImageLCM as ORMSoftwareImage, + ) + + # noqa: F401 + from nautobot_device_lifecycle_mgmt.models import ( + SoftwareLCM as ORMSoftware, + ) + + # noqa: F401 + from nautobot_device_lifecycle_mgmt.models import ( + 
ValidatedSoftwareLCM as ORMValidatedSoftware, + ) + + from nautobot_ssot.integrations.bootstrap.diffsync.models.base import ( + Software, + SoftwareImage, + ValidatedSoftware, + ) + + +class NautobotTenantGroup(TenantGroup): + """Nautobot implementation of Bootstrap TenantGroup model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create TenantGroup in Nautobot from NautobotTenantGroup object.""" + _parent = None + if ids["parent"]: + _parent = ORMTenantGroup.objects.get(name=ids["parent"]) + adapter.job.logger.info(f'Creating Nautobot TenantGroup: {ids["name"]}') + if _parent is not None: + new_tenant_group = ORMTenantGroup(name=ids["name"], parent=_parent, description=attrs["description"]) + else: + new_tenant_group = ORMTenantGroup(name=ids["name"], description=attrs["description"]) + new_tenant_group.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + new_tenant_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + new_tenant_group.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update TenantGroup in Nautobot from NautobotTenantGroup object.""" + self.adapter.job.logger.debug(f"Updating TenantGroup {self.name} with {attrs}") + _update_tenant_group = ORMTenantGroup.objects.get(name=self.name) + if "description" in attrs: + _update_tenant_group.description = attrs["description"] + if not check_sor_field(_update_tenant_group): + _update_tenant_group.custom_field_data.update( + {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")} + ) + _update_tenant_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_tenant_group.validated_save() + return super().update(attrs) + + def delete(self): + """Delete TenantGroup in Nautobot from NautobotTenantGroup object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete TenantGroup: {self} - {self.uuid}") + _nb_tenant_group = ORMTenantGroup.objects.get(id=self.uuid) + super().delete() + _nb_tenant_group.delete() + return self + except ORMTenantGroup.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find TenantGroup {self.uuid} for deletion. 
{err}") + + +class NautobotTenant(Tenant): + """Nautobot implementation of Bootstrap Tenant model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Tenant in Nautobot from NautobotTenant object.""" + _tags = [] + _tenant_group = None + _description = "" + if "tags" in attrs: + for _tag in attrs["tags"]: + _tags.append(ORMTag.objects.get(name=_tag)) + if "tenant_group" in attrs: + try: + _tenant_group = ORMTenantGroup.objects.get(name=attrs["tenant_group"]) + except ORMTenantGroup.DoesNotExist: + adapter.job.logger.warning( + f'Could not find TenantGroup {attrs["tenant_group"]} to assign to {ids["name"]}' + ) + if "description" in attrs: + _description = attrs["description"] + adapter.job.logger.info(f'Creating Nautobot Tenant: {ids["name"]}') + new_tenant = ORMTenant( + name=ids["name"], + tenant_group=_tenant_group, + tags=_tags, + description=_description, + ) + new_tenant.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + new_tenant.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + new_tenant.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Tenant in Nautobot from NautobotTenant object.""" + _update_tenant = ORMTenant.objects.get(name=self.name) + if "description" in attrs: + _update_tenant.description = attrs["description"] + if "tenant_group" in attrs: + try: + _update_tenant.tenant_group = ORMTenantGroup.objects.get(name=attrs["tenant_group"]) + except ORMTenantGroup.DoesNotExist: + self.adapter.job.logger.warning( + f'Could not find TenantGroup {attrs["tenant_group"]} to assign to {self.name}' + ) + if "tags" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_tenant.tags.clear() + for _tag in attrs["tags"]: + _update_tenant.tags.add(_tag) + if not check_sor_field(_update_tenant): + _update_tenant.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_tenant.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_tenant.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Tenant in Nautobot from NautobotTenant object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Tenant: {self} - {self.uuid}") + _nb_tenant = ORMTenant.objects.get(id=self.uuid) + super().delete() + _nb_tenant.delete() + return self + except ORMTenant.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Tenant {self.uuid} for deletion. 
{err}") + + +class NautobotRole(Role): + """Nautobot implementation of Bootstrap Role model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Role in Nautobot from NautobotRole object.""" + _content_types = [] + adapter.job.logger.info(f'Creating Nautobot Role: {ids["name"]}') + for _model in attrs["content_types"]: + _content_types.append(lookup_content_type_for_taggable_model_path(_model)) + _new_role = ORMRole( + name=ids["name"], + weight=attrs["weight"], + description=attrs["description"], + color=attrs["color"], + ) + _new_role.validated_save() + _new_role.content_types.set(_content_types) + _new_role.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_role.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_role.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Role in Nautobot from NautobotRole object.""" + _content_types = [] + self.adapter.job.logger.info(f"Updating Role {self.name}") + _update_role = ORMRole.objects.get(name=self.name) + if "weight" in attrs: + _update_role.weight = attrs["weight"] + if "description" in attrs: + _update_role.description = attrs["description"] + if "color" in attrs: + _update_role.color = attrs["color"] + if "content_types" in attrs: + for _model in attrs["content_types"]: + self.adapter.job.logger.debug(f"Looking up {_model} in content types.") + _content_types.append(lookup_content_type_for_taggable_model_path(_model)) + _update_role.content_types.set(_content_types) + if not check_sor_field(_update_role): + _update_role.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_role.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_role.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Role in Nautobot from NautobotRole object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Role: {self} - {self.uuid}") + _role = ORMRole.objects.get(id=self.uuid) + _role.delete() + super().delete() + return self + except ORMTenant.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Role {self.uuid} for deletion. 
{err}") + + +class NautobotManufacturer(Manufacturer): + """Nautobot implementation of Bootstrap Manufacturer model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Manufacturer in Nautobot from NautobotManufacturer object.""" + adapter.job.logger.debug(f'Creating Nautobot Manufacturer {ids["name"]}') + _new_manufacturer = ORMManufacturer(name=ids["name"], description=attrs["description"]) + _new_manufacturer.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_manufacturer.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_manufacturer.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Manufacturer in Nautobot from NautobotManufacturer object.""" + _update_manufacturer = ORMManufacturer.objects.get(name=self.name) + self.adapter.job.logger.info(f"Updating Manufacturer {self.name}") + if "description" in attrs: + _update_manufacturer.description = attrs["description"] + if not check_sor_field(_update_manufacturer): + _update_manufacturer.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_manufacturer.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_manufacturer.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Manufacturer in Nautobot from NautobotManufacturer object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Manufacturer: {self} - {self.uuid}") + _manufacturer = ORMManufacturer.objects.get(id=self.uuid) + _manufacturer.delete() + super().delete() + return self + except ORMManufacturer.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Manufacturer {self.uuid} for deletion. {err}") + except ProtectedError as err: + self.adapter.job.logger.warning( + f"Unable to delete Manufacturer {self.name}, as it is referenced by another object. {err}" + ) + + +class NautobotPlatform(Platform): + """Nautobot implementation of Bootstrap Platform model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Platform in Nautobot from NautobotPlatform object.""" + adapter.job.logger.info(f'Creating Nautobot Platform {ids["name"]}') + try: + _manufacturer = ORMManufacturer.objects.get(name=ids["manufacturer"]) + _new_platform = ORMPlatform( + name=ids["name"], + manufacturer=_manufacturer, + network_driver=attrs["network_driver"], + napalm_driver=attrs["napalm_driver"], + napalm_args=attrs["napalm_arguments"], + description=attrs["description"], + ) + _new_platform.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_platform.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_platform.validated_save() + except ORMManufacturer.DoesNotExist: + adapter.job.logger.warning( + f'Manufacturer {ids["manufacturer"]} does not exist in Nautobot, be sure to create it.' 
+                )
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Platform in Nautobot from NautobotPlatform object."""
+        _update_platform = ORMPlatform.objects.get(name=self.name)
+        if "network_driver" in attrs:
+            _update_platform.network_driver = attrs["network_driver"]
+        if "napalm_driver" in attrs:
+            _update_platform.napalm_driver = attrs["napalm_driver"]
+        if "napalm_arguments" in attrs:
+            _update_platform.napalm_args = attrs["napalm_arguments"]
+        if "description" in attrs:
+            _update_platform.description = attrs["description"]
+        _update_platform.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        if not check_sor_field(_update_platform):
+            _update_platform.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_platform.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Platform in Nautobot from NautobotPlatform object."""
+        try:
+            self.adapter.job.logger.debug(f"Attempting to delete Platform: {self} - {self.uuid}")
+            _platform = ORMPlatform.objects.get(id=self.uuid)
+            _platform.delete()
+            super().delete()
+            return self
+        except ORMPlatform.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find Platform {self.uuid} for deletion. {err}")
+        except ProtectedError as err:
+            self.adapter.job.logger.warning(
+                f"Unable to delete Platform {self.name}, as it is referenced by another object. {err}"
+            )
+
+
+class NautobotLocationType(LocationType):
+    """Nautobot implementation of Bootstrap LocationType model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create LocationType in Nautobot from NautobotLocationType object."""
+        _content_types = []
+        adapter.job.logger.info(f'Creating Nautobot LocationType: {ids["name"]}')
+        _parent = None
+        if "parent" in attrs:
+            try:
+                _parent = ORMLocationType.objects.get(name=attrs["parent"])
+            except ORMLocationType.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'Could not find LocationType {attrs["parent"]} in Nautobot, ensure it exists.'
+                )
+        _new_location_type = ORMLocationType(
+            name=ids["name"],
+            parent=_parent,
+            nestable=attrs["nestable"] if attrs["nestable"] is not None else False,
+            description=attrs["description"],
+        )
+        _new_location_type.validated_save()
+        for _model in attrs["content_types"]:
+            adapter.job.logger.debug(f"Looking up {_model} in content types.")
+            _content_types.append(lookup_content_type_for_taggable_model_path(_model))
+        _new_location_type.content_types.set(_content_types)
+        _new_location_type.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _new_location_type.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _new_location_type.validated_save()
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update LocationType in Nautobot from NautobotLocationType object."""
+        _content_types = []
+        self.adapter.job.logger.info(f"Updating LocationType {self.name}")
+        _update_location_type = ORMLocationType.objects.get(id=self.uuid)
+        if "parent" in attrs:
+            try:
+                _parent = ORMLocationType.objects.get(name=attrs["parent"])
+                _update_location_type.parent = _parent
+            except ORMLocationType.DoesNotExist:
+                self.adapter.job.logger.warning(
+                    f'Parent LocationType {attrs["parent"]} does not exist, ensure it exists first.'
+                )
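+        # NOTE: content_types are applied with .set() below, so the assigned set always mirrors the
+        # source data and any content types removed upstream are removed here as well.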
+        if "nestable" in attrs:
+            _update_location_type.nestable = attrs["nestable"]
+        if "description" in attrs:
+            _update_location_type.description = attrs["description"]
+        if "content_types" in attrs:
+            for _model in attrs["content_types"]:
+                self.adapter.job.logger.debug(f"Looking up {_model} in content types.")
+                _content_types.append(lookup_content_type_for_taggable_model_path(_model))
+            _update_location_type.content_types.set(_content_types)
+        if not check_sor_field(_update_location_type):
+            _update_location_type.custom_field_data.update(
+                {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+            )
+        _update_location_type.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_location_type.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete LocationType in Nautobot from NautobotLocationType object."""
+        try:
+            self.adapter.job.logger.debug(f"Attempting to delete LocationType: {self} - {self.uuid}")
+            _location_type = ORMLocationType.objects.get(id=self.uuid)
+            _location_type.delete()
+            super().delete()
+            return self
+        except ORMLocationType.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find LocationType {self.uuid} for deletion. {err}")
+        except ProtectedError as err:
+            self.adapter.job.logger.warning(
+                f"Unable to delete LocationType {self.name}, as it is referenced by another object. {err}"
+            )
+
+
+class NautobotLocation(Location):
+    """Nautobot implementation of Bootstrap Location model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Location in Nautobot from NautobotLocation object."""
+        adapter.job.logger.info(f'Creating Nautobot Location {ids["name"]}')
+
+        try:
+            _parent = None
+            _tenant = None
+            _timezone = None
+            _tags = []
+            _location_type = ORMLocationType.objects.get(name=ids["location_type"])
+            _status = ORMStatus.objects.get(name=attrs["status"])
+            if "parent" in attrs:
+                if attrs["parent"]:
+                    _parent = ORMLocation.objects.get(name=attrs["parent"])
+            if "tenant" in attrs:
+                if attrs["tenant"]:
+                    _tenant = Tenant.objects.get(name=attrs["tenant"])
+            if "time_zone" in attrs:
+                if attrs["time_zone"]:
+                    _timezone = pytz.timezone(attrs["time_zone"])
+            for _tag in attrs["tags"]:
+                _tags.append(ORMTag.objects.get(name=_tag))
+            _new_location = ORMLocation(
+                name=ids["name"],
+                location_type=_location_type,
+                parent=_parent,
+                status=_status,
+                facility=attrs["facility"],
+                asn=attrs["asn"],
+                time_zone=_timezone,
+                description=attrs["description"],
+                tenant=_tenant,
+                physical_address=attrs["physical_address"],
+                shipping_address=attrs["shipping_address"],
+                latitude=attrs["latitude"],
+                longitude=attrs["longitude"],
+                contact_name=attrs["contact_name"],
+                contact_phone=attrs["contact_phone"],
+                contact_email=attrs["contact_email"],
+                tags=_tags,
+            )
+            _new_location.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+            _new_location.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+            _new_location.validated_save()
+        except ORMStatus.DoesNotExist:
+            adapter.job.logger.warning(f'Status {attrs["status"]} could not be found. Make sure it exists.')
+        except ORMLocationType.DoesNotExist:
+            adapter.job.logger.warning(
+                f'LocationType {ids["location_type"]} could not be found. Make sure it exists.'
+            )
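+        # Lookup failures here and below are deliberately non-fatal: a warning is logged, the
+        # Nautobot object is skipped, and super().create() still records the DiffSync model so a
+        # later run can retry once the missing dependency exists.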
+        except ORMTenant.DoesNotExist:
+            adapter.job.logger.warning(f'Tenant {attrs["tenant"]} does not exist, verify it is created.')
+        except pytz.UnknownTimeZoneError:
+            adapter.job.logger.warning(
+                f'Timezone {attrs["time_zone"]} could not be found. Verify the timezone is a valid timezone.'
+            )
+        except ORMLocation.DoesNotExist:
+            adapter.job.logger.warning(f'Parent Location {attrs["parent"]} does not exist, ensure it exists first.')
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Location in Nautobot from NautobotLocation object."""
+        self.adapter.job.logger.info(f"Updating Location {self.name}.")
+        _parent = None
+        _tenant = None
+        _timezone = None
+        _location_type = ORMLocationType.objects.get(name=self.location_type)
+        _update_location = ORMLocation.objects.get(name=self.name, location_type=_location_type)
+        if "parent" in attrs:
+            if attrs["parent"]:
+                _parent = ORMLocation.objects.get(name=attrs["parent"])
+                _update_location.parent = _parent
+        if "status" in attrs:
+            _status = ORMStatus.objects.get(name=attrs["status"])
+            _update_location.status = _status
+        if "facility" in attrs:
+            _update_location.facility = attrs["facility"]
+        if "asn" in attrs:
+            _update_location.asn = attrs["asn"]
+        if "time_zone" in attrs:
+            if attrs["time_zone"]:
+                _timezone = pytz.timezone(attrs["time_zone"])
+                _update_location.time_zone = _timezone
+        if "description" in attrs:
+            _update_location.description = attrs["description"]
+        if "tenant" in attrs:
+            _tenant = Tenant.objects.get(name=attrs["tenant"])
+            _update_location.tenant = _tenant
+        if "physical_address" in attrs:
+            _update_location.physical_address = attrs["physical_address"]
+        if "shipping_address" in attrs:
+            _update_location.shipping_address = attrs["shipping_address"]
+        if "latitude" in attrs:
+            _update_location.latitude = attrs["latitude"]
+        if "longitude" in attrs:
+            _update_location.longitude = attrs["longitude"]
+        if "contact_name" in attrs:
+            _update_location.contact_name = attrs["contact_name"]
+        if "contact_phone" in attrs:
+            _update_location.contact_phone = attrs["contact_phone"]
+        if "contact_email" in attrs:
+            _update_location.contact_email = attrs["contact_email"]
+        if "tags" in attrs:
+            _tags = []
+            for _tag in attrs["tags"]:
+                _tags.append(ORMTag.objects.get(name=_tag))
+            _update_location.tags.clear()
+            for _tag in _tags:
+                _update_location.tags.add(_tag)
+        if not check_sor_field(_update_location):
+            _update_location.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_location.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_location.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Location in Nautobot from NautobotLocation object."""
+        try:
+            self.adapter.job.logger.debug(f"Attempting to delete Location: {self} - {self.uuid}")
+            _location = ORMLocation.objects.get(id=self.uuid)
+            _location.delete()
+            super().delete()
+            return self
+        except ORMLocation.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find Location {self.uuid} for deletion. {err}")
+        except ProtectedError as err:
+            self.adapter.job.logger.warning(
+                f"Unable to delete Location {self.name}, as it is referenced by another object. 
{err}" + ) + + +class NautobotTeam(Team): + """Nautobot implementation of Team DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Team in Nautobot from NautobotTeam object.""" + adapter.job.logger.debug(f'Creating Nautobot Team {ids["name"]}') + _new_team = ORMTeam( + name=ids["name"], + phone=attrs["phone"], + email=attrs["email"], + address=attrs["address"], + ) + _new_team.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_team.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_team.validated_save() + # TODO: Need to consider how to allow loading from teams or contacts models. + # if "contacts" in attrs: + # # FIXME: There might be a better way to handle this that's easier on the database. + # _new_team.contacts.clear() + # for _contact in attrs["contacts"]: + # adapter.job.logger.debug(f'Looking up Contact: {_contact} for Team: {ids["name"]}.') + # _new_team.contact.add(lookup_contact_for_team(contact=_contact)) + # _new_team.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Team in Nautobot from NautobotTeam object.""" + _update_team = ORMTeam.objects.get(name=self.name) + self.adapter.job.logger.info(f"Updating Team {self.name}") + if "phone" in attrs: + _update_team.phone = attrs["phone"] + if "email" in attrs: + _update_team.email = attrs["email"] + if "address" in attrs: + _update_team.address = attrs["address"] + # TODO: Need to consider how to allow loading from teams or contacts models. + # if "contacts" in attrs: + # # FIXME: There might be a better way to handle this that's easier on the database. + # _update_team.contacts.clear() + # for _contact in attrs["contacts"]: + # self.adapter.job.logger.debug(f"Looking up Contact: {_contact} for Team: {self.name}.") + # _update_team.contacts.add(lookup_contact_for_team(contact=_contact)) + if not check_sor_field(_update_team): + _update_team.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_team.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_team.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Team in Nautobot from NautobotTeam object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Team: {self} - {self.uuid}") + _team = ORMTeam.objects.get(id=self.uuid) + _team.delete() + super().delete() + return self + except ORMTeam.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Team {self.uuid} for deletion. 
{err}") + + +class NautobotContact(Contact): + """Nautobot implementation of Contact DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Contact in Nautobot from NautobotContact object.""" + adapter.job.logger.debug(f'Creating Nautobot Contact {ids["name"]}') + _new_contact = ORMContact( + name=ids["name"], + phone=attrs["phone"], + email=attrs["email"], + address=attrs["address"], + ) + _new_contact.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_contact.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_contact.validated_save() + if "teams" in attrs: + for _team in attrs["teams"]: + adapter.job.logger.debug(f'Looking up Team: {_team} for Contact: {ids["name"]}.') + _new_contact.teams.add(lookup_team_for_contact(team=_team)) + _new_contact.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Contact in Nautobot from NautobotContact object.""" + _update_contact = ORMContact.objects.get(name=self.name) + self.adapter.job.logger.info(f"Updating Contact {self.name}") + if "phone" in attrs: + _update_contact.phone = attrs["phone"] + if "email" in attrs: + _update_contact.email = attrs["email"] + if "address" in attrs: + _update_contact.address = attrs["address"] + if "teams" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_contact.teams.clear() + for _team in attrs["teams"]: + self.adapter.job.logger.debug(f"Looking up Team: {_team} for Contact: {self.name}.") + _update_contact.teams.add(lookup_team_for_contact(team=_team)) + if not check_sor_field(_update_contact): + _update_contact.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_contact.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_contact.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Contact in Nautobot from NautobotContact object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Team: {self} - {self.uuid}") + _contact = ORMContact.objects.get(id=self.uuid) + _contact.delete() + super().delete() + return self + except ORMTenant.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Contact {self.uuid} for deletion. 
{err}") + + +class NautobotProvider(Provider): + """Nautobot implementation of Provider DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Provider in Nautobot from NautobotProvider object.""" + adapter.job.logger.info(f'Creating Nautobot Provider: {ids["name"]}') + if "tags" in attrs: + _tags = [] + for _tag in attrs["tags"]: + _tags.append(ORMTag.get(name=_tag)) + _new_provider = ORMProvider( + name=ids["name"], + asn=attrs["asn"], + account=attrs["account_number"], + portal_url=attrs["portal_url"], + noc_contact=attrs["noc_contact"], + admin_contact=attrs["admin_contact"], + ) + for _tag in attrs["tags"]: + try: + _new_provider.tags.add(ORMTag.objects.get(name=_tag)) + except ORMTag.DoesNotExist: + adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.") + _new_provider.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_provider.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_provider.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Provider in Nautobot from NautobotProvider object.""" + self.adapter.job.logger.debug(f"Updating Nautobot Provider {self.name}") + _update_provider = ORMProvider.objects.get(id=self.uuid) + if "asn" in attrs: + _update_provider.asn = attrs["asn"] + if "account_number" in attrs: + _update_provider.account = attrs["account_number"] + if "portal_url" in attrs: + _update_provider.portal_url = attrs["portal_url"] + if "noc_contact" in attrs: + _update_provider.noc_contact = attrs["noc_contact"] + if "admin_contact" in attrs: + _update_provider.admin_contact = attrs["admin_contact"] + if "tags" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_provider.tags.clear() + for _tag in attrs["tags"]: + try: + _update_provider.tags.add(ORMTag.objects.get(name=_tag)) + except ORMTag.DoesNotExist: + self.adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.") + if not check_sor_field(_update_provider): + _update_provider.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_provider.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_provider.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Provider in Nautobot from NautobotProvider object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Provider: {self} - {self.uuid}") + _nb_provider = ORMProvider.objects.get(id=self.uuid) + _nb_provider.delete() + super().delete() + return self + except ORMProvider.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Provider {self.uuid} for deletion. 
{err}") + + +class NautobotProviderNetwork(ProviderNetwork): + """Nautobot implementation of ProviderNetwork DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create ProviderNetwork in Nautobot from NautobotProviderNetwork object.""" + adapter.job.logger.info(f'Creating Nautobot ProviderNetwork: {ids["name"]}') + if "tags" in attrs: + _tags = [] + for _tag in attrs["tags"]: + _tags.append(ORMTag.get(name=_tag)) + _new_provider_network = ORMProviderNetwork( + name=ids["name"], + provider=ORMProvider.objects.get(name=ids["provider"]), + description=attrs["description"], + comments=attrs["comments"], + ) + for _tag in attrs["tags"]: + try: + _new_provider_network.tags.add(ORMTag.objects.get(name=_tag)) + except ORMTag.DoesNotExist: + adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.") + _new_provider_network.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_provider_network.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_provider_network.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update ProviderNetwork in Nautobot from NautobotProviderNetwork object.""" + self.adapter.job.logger.debug(f"Updating Nautobot ProviderNetwork {self.name}") + _update_provider_network = ORMProviderNetwork.objects.get(id=self.uuid) + if "description" in attrs: + _update_provider_network.description = attrs["description"] + if "comments" in attrs: + _update_provider_network.comments = attrs["comments"] + if "tags" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_provider_network.tags.clear() + for _tag in attrs["tags"]: + try: + _update_provider_network.tags.add(ORMTag.objects.get(name=_tag)) + except ORMTag.DoesNotExist: + self.adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.") + _update_provider_network.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + if not check_sor_field(_update_provider_network): + _update_provider_network.custom_field_data.update( + {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")} + ) + _update_provider_network.validated_save() + return super().update(attrs) + + def delete(self): + """Delete ProviderNetwork in Nautobot from NautobotProviderNetwork object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete ProviderNetwork: {self} - {self.uuid}") + _nb_provider_network = ORMProviderNetwork.objects.get(id=self.uuid) + _nb_provider_network.delete() + super().delete() + return self + except ORMProviderNetwork.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find ProviderNetwork {self.uuid} for deletion. 
{err}") + + +class NautobotCircuitType(CircuitType): + """Nautobot implementation of CircuitType DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create CircuitType in Nautobot from NautobotCircuitType object.""" + adapter.job.logger.info(f'Creating Nautobot CircuitType: {ids["name"]}') + _new_circuit_type = ORMCircuitType( + name=ids["name"], + description=attrs["description"], + ) + _new_circuit_type.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_circuit_type.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_circuit_type.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update CircuitType in Nautobot from NautobotCircuitType object.""" + self.adapter.job.logger.debug(f"Updating Nautobot CircuitType {self.name}") + _update_circuit_type = ORMCircuitType.objects.get(id=self.uuid) + if "description" in attrs: + _update_circuit_type.description = attrs["description"] + _update_circuit_type.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + if not check_sor_field(_update_circuit_type): + _update_circuit_type.custom_field_data.update( + {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")} + ) + _update_circuit_type.validated_save() + return super().update(attrs) + + def delete(self): + """Delete CircuitType in Nautobot from NautobotCircuitType object.""" + try: + self.adapter.job.logger.debug(f"Attempting to delete Circuittype: {self} - {self.uuid}") + _nb_circuit_type = ORMCircuitType.objects.get(id=self.uuid) + _nb_circuit_type.delete() + super().delete() + return self + except ORMCircuitType.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find CircuitType {self.uuid} for deletion. 
{err}") + + +class NautobotCircuit(Circuit): + """Nautobot implementation of Circuit DiffSync model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Circuit in Nautobot from NautobotCircuit object.""" + adapter.job.logger.info(f'Creating Nautobot Circuit: {ids["circuit_id"]}') + if "tags" in attrs: + _tags = [] + for _tag in attrs["tags"]: + _tags.append(ORMTag.get(name=_tag)) + _provider = ORMProvider.objects.get(name=ids["provider"]) + _circuit_type = ORMCircuitType.objects.get(name=attrs["circuit_type"]) + _status = ORMStatus.objects.get(name=attrs["status"]) + _tenant = None + if "tenant" in attrs: + if attrs["tenant"] is not None: + _tenant = ORMTenant.objects.get(name=attrs["tenant"]) + _new_circuit = ORMCircuit( + cid=ids["circuit_id"], + provider=_provider, + circuit_type=_circuit_type, + status=_status, + install_date=(attrs["date_installed"] if attrs["date_installed"] is not None else None), + commit_rate=attrs["commit_rate_kbps"], + description=attrs["description"], + tenant=_tenant, + ) + for _tag in attrs["tags"]: + try: + _new_circuit.tags.add(ORMTag.objects.get(name=_tag)) + except ORMTag.DoesNotExist: + adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.") + _new_circuit.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_circuit.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_circuit.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Circuit in Nautobot from NautobotCircuit object.""" + self.adapter.job.logger.debug(f"Updating Nautobot Circuit {self.circuit_id}") + _update_circuit = ORMCircuit.objects.get(id=self.uuid) + if "circuit_type" in attrs: + _circuit_type = ORMCircuitType.objects.get(name=attrs["circuit_type"]) + _update_circuit.circuit_type = _circuit_type + if "status" in attrs: + _status = ORMStatus.objects.get(name=attrs["status"]) + _update_circuit.status = _status + if "date_installed" in attrs: + _update_circuit.install_date = attrs["date_installed"] + if "commit_rate_kbps" in attrs: + _update_circuit.commit_rate = attrs["commit_rate_kbps"] + if "description" in attrs: + _update_circuit.description = attrs["description"] + if "tenant" in attrs: + _tenant = ORMTenant.objects.get(name=attrs["tenant"]) + _update_circuit.tenant = _tenant + if "tags" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. 
+            _update_circuit.tags.clear()
+            for _tag in attrs["tags"]:
+                try:
+                    _update_circuit.tags.add(ORMTag.objects.get(name=_tag))
+                except ORMTag.DoesNotExist:
+                    self.adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.")
+        if "terminations" in attrs:
+            # TODO: Implement circuit terminations
+            pass
+        if not check_sor_field(_update_circuit):
+            _update_circuit.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_circuit.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_circuit.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Circuit in Nautobot from NautobotCircuit object."""
+        try:
+            self.adapter.job.logger.debug(f"Attempting to delete Circuit: {self} - {self.uuid}")
+            _circuit = ORMCircuit.objects.get(id=self.uuid)
+            _circuit.delete()
+            super().delete()
+            return self
+        except ORMCircuit.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find Circuit {self.uuid} for deletion. {err}")
+
+
+class NautobotCircuitTermination(CircuitTermination):
+    """Nautobot implementation of CircuitTermination DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create CircuitTermination in Nautobot from NautobotCircuitTermination object."""
+        adapter.job.logger.info(f'Creating Nautobot CircuitTermination {ids["name"]}')
+        _name_parts = ids["name"].split("__", 2)
+        _circuit_id = _name_parts[0]
+        _provider_name = _name_parts[1]
+        _term_side = _name_parts[2]
+        try:
+            _provider = ORMProvider.objects.get(name=_provider_name)
+        except ORMProvider.DoesNotExist:
+            adapter.job.logger.warning(f"Provider {_provider_name} does not exist in Nautobot. Please create it.")
+        try:
+            _circuit = ORMCircuit.objects.get(cid=_circuit_id, provider=_provider)
+        except ORMCircuit.DoesNotExist:
+            adapter.job.logger.warning(f"Circuit {_circuit_id} does not exist in Nautobot. Please create it.")
+        _tags = attrs.get("tags", [])
+        if attrs["termination_type"] == "Provider Network":
+            try:
+                _provider_network = ORMProviderNetwork.objects.get(name=attrs["provider_network"])
+            except ORMProviderNetwork.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'ProviderNetwork {attrs["provider_network"]} does not exist in Nautobot. Please create it.'
+                )
+            _new_circuit_termination = ORMCircuitTermination(
+                provider_network=_provider_network,
+                circuit=_circuit,
+                term_side=_term_side,
+                xconnect_id=attrs["cross_connect_id"],
+                pp_info=attrs["patch_panel_or_ports"],
+                description=attrs["description"],
+                upstream_speed=attrs["upstream_speed_kbps"],
+                port_speed=attrs["port_speed_kbps"],
+            )
+        if attrs["termination_type"] == "Location":
+            try:
+                _location = ORMLocation.objects.get(name=attrs["location"])
+            except ORMLocation.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'Location {attrs["location"]} does not exist in Nautobot. Please create it.'
+                )
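+            # Location-terminated side; mirrors the Provider Network branch above, but anchors the
+            # termination to a Location instead of a provider network.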
+            _new_circuit_termination = ORMCircuitTermination(
+                location=_location,
+                circuit=_circuit,
+                term_side=_term_side,
+                xconnect_id=attrs["cross_connect_id"],
+                pp_info=attrs["patch_panel_or_ports"],
+                description=attrs["description"],
+                upstream_speed=attrs["upstream_speed_kbps"],
+                port_speed=attrs["port_speed_kbps"],
+            )
+        _new_circuit_termination.custom_field_data.update(
+            {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+        )
+        _new_circuit_termination.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _new_circuit_termination.validated_save()
+        for _tag in _tags:
+            try:
+                _new_circuit_termination.tags.add(ORMTag.objects.get(name=_tag))
+            except ORMTag.DoesNotExist:
+                adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.")
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update CircuitTermination in Nautobot from NautobotCircuitTermination object."""
+        self.adapter.job.logger.debug(f"Updating Nautobot CircuitTermination {self.name}")
+        _update_circuit_termination = ORMCircuitTermination.objects.get(id=self.uuid)
+        if "location" in attrs:
+            try:
+                _location = ORMLocation.objects.get(name=attrs["location"])
+                if _update_circuit_termination.provider_network:
+                    _update_circuit_termination.provider_network = None
+                _update_circuit_termination.location = _location
+            except ORMLocation.DoesNotExist:
+                self.adapter.job.logger.warning(
+                    f'Location {attrs["location"]} does not exist in Nautobot. Please create it.'
+                )
+        if "provider_network" in attrs:
+            try:
+                _provider_network = ORMProviderNetwork.objects.get(name=attrs["provider_network"])
+                if _update_circuit_termination.location:
+                    _update_circuit_termination.location = None
+                _update_circuit_termination.provider_network = _provider_network
+            except ORMProviderNetwork.DoesNotExist:
+                self.adapter.job.logger.warning(
+                    f'ProviderNetwork {attrs["provider_network"]} does not exist in Nautobot. Please create it.'
+                )
+        if "port_speed_kbps" in attrs:
+            _update_circuit_termination.port_speed = attrs["port_speed_kbps"]
+        if "upstream_speed_kbps" in attrs:
+            _update_circuit_termination.upstream_speed = attrs["upstream_speed_kbps"]
+        if "cross_connect_id" in attrs:
+            _update_circuit_termination.xconnect_id = attrs["cross_connect_id"]
+        if "patch_panel_or_ports" in attrs:
+            _update_circuit_termination.pp_info = attrs["patch_panel_or_ports"]
+        if "description" in attrs:
+            _update_circuit_termination.description = attrs["description"]
+        if "tags" in attrs:
+            # FIXME: There might be a better way to handle this that's easier on the database. 
+            _update_circuit_termination.tags.clear()
+            for _tag in attrs["tags"]:
+                try:
+                    _update_circuit_termination.tags.add(ORMTag.objects.get(name=_tag))
+                except ORMTag.DoesNotExist:
+                    self.adapter.job.logger.warning(f"Tag {_tag} does not exist in Nautobot.")
+        _update_circuit_termination.custom_field_data.update(
+            {"last_synced_from_sor": datetime.today().date().isoformat()}
+        )
+        if not check_sor_field(_update_circuit_termination):
+            _update_circuit_termination.custom_field_data.update(
+                {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+            )
+        _update_circuit_termination.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete CircuitTermination in Nautobot from NautobotCircuitTermination object."""
+        try:
+            self.adapter.job.logger.debug(f"Attempting to delete CircuitTermination: {self} - {self.uuid}")
+            _nb_circuit_termination = ORMCircuitTermination.objects.get(id=self.uuid)
+            _nb_circuit_termination.delete()
+            super().delete()
+            return self
+        except ORMCircuitTermination.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find CircuitTermination {self.uuid} for deletion. {err}")
+
+
+class NautobotNamespace(Namespace):
+    """Nautobot implementation of Nautobot Namespace model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Namespace in Nautobot from NautobotNamespace object."""
+        adapter.job.logger.info(f'Creating Nautobot Namespace {ids["name"]}')
+        new_namespace = ORMNamespace(
+            name=ids["name"],
+            description=attrs["description"],
+        )
+        new_namespace.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        new_namespace.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        new_namespace.validated_save()
+        if "location" in attrs:
+            try:
+                _location = ORMLocation.objects.get(name=attrs["location"])
+                new_namespace.location = _location
+                new_namespace.validated_save()
+            except ORMLocation.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'Nautobot Location {attrs["location"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Namespace in Nautobot from NautobotNamespace object."""
+        self.adapter.job.logger.debug(f"Updating Nautobot Namespace {self.name}.")
+        _update_namespace = ORMNamespace.objects.get(id=self.uuid)
+        if "description" in attrs:
+            _update_namespace.description = attrs["description"]
+        if "location" in attrs:
+            try:
+                _location = ORMLocation.objects.get(name=attrs["location"])
+                _update_namespace.location = _location
+            except ORMLocation.DoesNotExist:
+                self.adapter.job.logger.warning(
+                    f'Nautobot Location {attrs["location"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+        if not check_sor_field(_update_namespace):
+            _update_namespace.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_namespace.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_namespace.validated_save()
+
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Namespace in Nautobot from NautobotNamespace object."""
+        self.adapter.job.logger.debug(f"Delete Nautobot Namespace {self.uuid}")
+        try:
+            _namespace = ORMNamespace.objects.get(id=self.uuid)
+            super().delete()
+            _namespace.delete()
+            return self
+        except ORMNamespace.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find Namespace {self.uuid} for deletion. {err}")
+        except ProtectedError as err:
+            self.adapter.job.logger.warning(
+                f"Unable to delete Namespace {self.name} due to existing references. Error: {err}."
+            )
+
+
+class NautobotRiR(RiR):
+    """Nautobot implementation of Nautobot RiR model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create RiR in Nautobot from NautobotRiR object."""
+        adapter.job.logger.info(f'Creating Nautobot RiR: {ids["name"]}')
+        new_rir = ORMRiR(
+            name=ids["name"],
+            is_private=attrs["private"],
+            description=attrs["description"],
+        )
+        new_rir.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        new_rir.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        new_rir.validated_save()
+
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update RiR in Nautobot from NautobotRiR object."""
+        self.adapter.job.logger.info(f"Updating Nautobot RiR {self.name}")
+        _update_rir = ORMRiR.objects.get(id=self.uuid)
+        if "private" in attrs:
+            _update_rir.is_private = attrs["private"]
+        if "description" in attrs:
+            _update_rir.description = attrs["description"]
+        if not check_sor_field(_update_rir):
+            _update_rir.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_rir.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_rir.validated_save()
+
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete RiR in Nautobot from NautobotRiR object."""
+        self.adapter.job.logger.debug(f"Delete Nautobot RiR {self.uuid}")
+        try:
+            _rir = ORMRiR.objects.get(id=self.uuid)
+            super().delete()
+            _rir.delete()
+            return self
+        except ORMRiR.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find RiR {self.uuid} for deletion. {err}")
+        except ProtectedError as err:
+            self.adapter.job.logger.warning(
+                f"Unable to delete RiR {self.name} due to existing references. Error: {err}."
+            )
+
+
+class NautobotVLANGroup(VLANGroup):
+    """Nautobot implementation of Nautobot VLANGroup model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create VLANGroup in Nautobot from NautobotVLANGroup object."""
+        adapter.job.logger.info(f'Creating Nautobot VLANGroup: {ids["name"]}')
+        try:
+            _location = ORMLocation.objects.get(name=attrs["location"])
+        except ORMLocation.DoesNotExist:
+            _location = None
+            adapter.job.logger.warning(
+                f'Nautobot Location {attrs["location"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+            )
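+        # A missing Location is tolerated here: the group is still created, just without a
+        # location assignment.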
+        new_vlan_group = ORMVLANGroup(
+            name=ids["name"],
+            location=_location,
+            description=attrs["description"],
+        )
+        new_vlan_group.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        new_vlan_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        new_vlan_group.validated_save()
+
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update VLANGroup in Nautobot from NautobotVLANGroup object."""
+        self.adapter.job.logger.info(f"Updating Nautobot VLANGroup {self.name}")
+        _update_vlan_group = ORMVLANGroup.objects.get(id=self.uuid)
+        if "location" in attrs:
+            try:
+                _location = ORMLocation.objects.get(name=attrs["location"])
+            except ORMLocation.DoesNotExist:
+                _location = None
+                self.adapter.job.logger.warning(
+                    f'Nautobot Location {attrs["location"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+            _update_vlan_group.location = _location
+        if "description" in attrs:
+            _update_vlan_group.description = attrs["description"]
+        if not check_sor_field(_update_vlan_group):
+            _update_vlan_group.custom_field_data.update(
+                {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+            )
+        _update_vlan_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_vlan_group.validated_save()
+
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete VLANGroup in Nautobot from NautobotVLANGroup object."""
+        self.adapter.job.logger.debug(f"Delete Nautobot VLANGroup {self.uuid}")
+        try:
+            _vlan_group = ORMVLANGroup.objects.get(id=self.uuid)
+            super().delete()
+            _vlan_group.delete()
+            return self
+        except ORMVLANGroup.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find VLANGroup {self.uuid} for deletion. {err}")
+        except ProtectedError as err:
+            self.adapter.job.logger.warning(
+                f"Unable to delete VLANGroup {self.name} due to existing references. Error: {err}."
+            )
+
+
+class NautobotVLAN(VLAN):
+    """Nautobot implementation of Nautobot VLAN model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create VLAN in Nautobot from NautobotVLAN object."""
+        adapter.job.logger.info(f'Creating Nautobot VLAN: {ids["name"]}')
+        try:
+            _vlan_group = ORMVLANGroup.objects.get(name=ids["vlan_group"])
+        except ORMVLANGroup.DoesNotExist:
+            _vlan_group = None
+            if ids["vlan_group"]:
+                adapter.job.logger.warning(
+                    f'Nautobot VLANGroup {ids["vlan_group"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+        try:
+            _status = ORMStatus.objects.get(name=attrs["status"])
+        except ORMStatus.DoesNotExist:
+            _status = ORMStatus.objects.get(name="Active")
+            adapter.job.logger.warning(
+                f'Nautobot Status {attrs["status"]} does not exist. Make sure it is created manually or defined in global_settings.yaml. Defaulting to Status Active.'
+            )
+        try:
+            _role = ORMRole.objects.get(name=attrs["role"])
+        except ORMRole.DoesNotExist:
+            _role = None
+            if attrs["role"]:
+                adapter.job.logger.warning(
+                    f'Nautobot Role {attrs["role"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+        try:
+            _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+        except ORMTenant.DoesNotExist:
+            _tenant = None
+            if attrs["tenant"]:
+                adapter.job.logger.warning(
+                    f'Nautobot Tenant {attrs["tenant"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
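+        # Tags and locations are resolved individually below, so one missing object only logs a
+        # warning instead of aborting the whole VLAN creation.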
+        _tags = []
+        for tag in attrs.get("tags") or []:
+            try:
+                _tags.append(ORMTag.objects.get(name=tag))
+            except ORMTag.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'Nautobot Tag {tag} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+        new_vlan = ORMVLAN(
+            name=ids["name"],
+            vid=ids["vid"],
+            vlan_group=_vlan_group,
+            status=_status,
+            role=_role,
+            tenant=_tenant,
+            description=attrs["description"],
+        )
+        new_vlan.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        new_vlan.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        new_vlan.validated_save()
+        for _tag in _tags:
+            new_vlan.tags.add(_tag)
+        _locations = []
+        for _location in attrs.get("locations") or []:
+            try:
+                _locations.append(ORMLocation.objects.get(name=_location))
+            except ORMLocation.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'Nautobot Location {_location} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+        for _location in _locations:
+            new_vlan.locations.add(_location)
+
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update VLAN in Nautobot from NautobotVLAN object."""
+        self.adapter.job.logger.info(f"Updating Nautobot VLAN: {self.name}__{self.vid}")
+        _update_vlan = ORMVLAN.objects.get(id=self.uuid)
+        if "description" in attrs:
+            _update_vlan.description = attrs["description"]
+        if "status" in attrs:
+            try:
+                _status = ORMStatus.objects.get(name=attrs["status"])
+            except ORMStatus.DoesNotExist:
+                _status = ORMStatus.objects.get(name="Active")
+                self.adapter.job.logger.warning(
+                    f'Nautobot Status {attrs["status"]} does not exist. Make sure it is created manually or defined in global_settings.yaml. Defaulting to Status Active.'
+                )
+            _update_vlan.status = _status
+        if "role" in attrs:
+            try:
+                _role = ORMRole.objects.get(name=attrs["role"])
+            except ORMRole.DoesNotExist:
+                _role = None
+                if attrs["role"]:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot Role {attrs["role"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            _update_vlan.role = _role
+        if "tenant" in attrs:
+            try:
+                _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+            except ORMTenant.DoesNotExist:
+                _tenant = None
+                if attrs["tenant"]:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot Tenant {attrs["tenant"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            _update_vlan.tenant = _tenant
+        if "tags" in attrs:
+            _tags = []
+            for tag in attrs["tags"]:
+                try:
+                    _tags.append(ORMTag.objects.get(name=tag))
+                except ORMTag.DoesNotExist:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot Tag {tag} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            # TODO: Probably a better way to handle this that's easier on the database.
+            _update_vlan.tags.clear()
+            for _tag in _tags:
+                _update_vlan.tags.add(_tag)
+        if "locations" in attrs:
+            # TODO: Probably a better way to handle this that's easier on the database. 
+            _update_vlan.locations.clear()
+            _locations = []
+            for _location in attrs["locations"] or []:
+                try:
+                    _locations.append(ORMLocation.objects.get(name=_location))
+                except ORMLocation.DoesNotExist:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot Location {_location} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            for _location in _locations:
+                _update_vlan.locations.add(_location)
+        if not check_sor_field(_update_vlan):
+            _update_vlan.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_vlan.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_vlan.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete VLAN in Nautobot from NautobotVLAN object."""
+        self.adapter.job.logger.debug(f"Delete Nautobot VLAN {self.uuid}")
+        try:
+            _vlan = ORMVLAN.objects.get(id=self.uuid)
+            super().delete()
+            _vlan.delete()
+            return self
+        except ORMVLAN.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find VLAN {self.uuid} for deletion. {err}")
+        except ProtectedError as err:
+            self.adapter.job.logger.warning(
+                f"Unable to delete VLAN {self.name} due to existing references. Error: {err}."
+            )
+
+
+class NautobotVRF(VRF):
+    """Nautobot implementation of Nautobot VRF model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create VRF in Nautobot from NautobotVRF object."""
+        adapter.job.logger.info(f'Creating Nautobot VRF: {ids["name"]}')
+        try:
+            _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+        except ORMTenant.DoesNotExist:
+            _tenant = None
+            if attrs["tenant"]:
+                adapter.job.logger.warning(
+                    f'Nautobot Tenant {attrs["tenant"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+        try:
+            _namespace = ORMNamespace.objects.get(name=ids["namespace"])
+        except ORMNamespace.DoesNotExist:
+            _namespace = ORMNamespace.objects.get(name="Global")
+            adapter.job.logger.warning(
+                f'Nautobot Namespace {ids["namespace"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+            )
+        new_vrf = ORMVRF(
+            name=ids["name"],
+            namespace=_namespace,
+            rd=attrs["route_distinguisher"],
+            tenant=_tenant,
+            description=attrs["description"],
+        )
+        new_vrf.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        new_vrf.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        new_vrf.validated_save()
+        if attrs.get("tags"):
+            for _tag in attrs["tags"]:
+                new_vrf.tags.add(ORMTag.objects.get(name=_tag))
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update VRF in Nautobot from NautobotVRF object."""
+        self.adapter.job.logger.info(f"Updating Nautobot VRF: {self.name}")
+        _update_vrf = ORMVRF.objects.get(id=self.uuid)
+        if "tenant" in attrs:
+            try:
+                _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+            except ORMTenant.DoesNotExist:
+                _tenant = None
+                if attrs["tenant"]:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot Tenant {attrs["tenant"]} does not exist. 
Make sure it is created manually or defined in global_settings.yaml' + ) + _update_vrf.tenant = _tenant + if "description" in attrs: + _update_vrf.description = attrs["description"] + if "route_distinguisher" in attrs: + _update_vrf.rd = attrs["route_distinguisher"] + if attrs.get("tags"): + _update_vrf.tags.clear() + for _tag in attrs["tags"]: + _update_vrf.tags.add(ORMTag.objects.get(name=_tag)) + if not check_sor_field(_update_vrf): + _update_vrf.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_vrf.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_vrf.validated_save() + return super().update(attrs) + + def delete(self): + """Delete VRF in Nautobot from NautobotVRF object.""" + self.adapter.job.logger.debug(f"Delete Nautobot VRF {self.uuid}") + try: + _vrf = ORMVRF.objects.get(id=self.uuid) + super().delete() + _vrf.delete() + return self + except ORMVRF.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find VRF {self.uuid} for deletion. {err}") + except ProtectedError as err: + self.adapter.job.logger.warning( + f"Unable to delete VRF {self.name} due to existing references. Error: {err}." + ) + + +class NautobotPrefix(Prefix): + """Nautobot implementation of Nautobot Prefix model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Prefix in Nautobot from NautobotPrefix object.""" + adapter.job.logger.info(f'Creating Nautobot Prefix: {ids["network"]} in Namespace: {ids["namespace"]}') + try: + _namespace = ORMNamespace.objects.get(name=ids["namespace"]) + except ORMNamespace.DoesNotExist: + _namespace = ORMNamespace.objects.get(name="Global") + adapter.job.logger.warning( + f'Nautobot Namespace {ids["namespace"]} does not exist. Defaulting to Global Namespace.' + ) + try: + if attrs["vlan"]: + _vlan_name, _vlan_id, _vlan_group_name = attrs["vlan"].split("__", 2) + _vlan_group = ORMVLANGroup.objects.get(name=_vlan_group_name) + _vlan = ORMVLAN.objects.get( + name=_vlan_name, + vid=_vlan_id, + vlan_group=_vlan_group if _vlan_group != "None" else None, + ) + else: + _vlan = None + except ORMVLANGroup.DoesNotExist: + _vlan = None + if attrs["vlan"]: + adapter.job.logger.warning( + f'Nautobot VLANGroup {attrs["vlan"]} does not exist. Make sure it is created manually or defined in global_settings.yaml' + ) + except ORMVLAN.DoesNotExist: + _vlan = None + if attrs["vlan"]: + adapter.job.logger.warning( + f'Nautobot VLAN {attrs["vlan"]} does not exist. Make sure it is created manually or defined in global_settings.yaml' + ) + try: + _status = ORMStatus.objects.get(name=attrs["status"]) + except ORMStatus.DoesNotExist: + _status = ORMStatus.objects.get(name="Active") + adapter.job.logger.warning( + f'Nautobot Status {attrs["status"]} does not exist. Make sure it is created manually or defined in global_settings.yaml. Defaulting to Status Active.' + ) + try: + _role = ORMRole.objects.get(name=attrs["role"]) + except ORMRole.DoesNotExist: + _role = None + if attrs["role"]: + adapter.job.logger.warning( + f'Nautobot Role {attrs["role"]} does not exist. Make sure it is created manually or defined in global_settings.yaml' + ) + try: + _tenant = ORMTenant.objects.get(name=attrs["tenant"]) + except ORMTenant.DoesNotExist: + _tenant = None + if attrs["tenant"]: + adapter.job.logger.warning( + f'Nautobot Tenant {attrs["tenant"]} does not exist. 
Make sure it is created manually or defined in global_settings.yaml'
+                )
+        try:
+            _rir = ORMRiR.objects.get(name=attrs["rir"])
+        except ORMRiR.DoesNotExist:
+            _rir = None
+            if attrs["rir"]:
+                adapter.job.logger.warning(
+                    f'Nautobot RiR {attrs["rir"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+        _tags = []
+        for tag in attrs.get("tags") or []:
+            try:
+                _tags.append(ORMTag.objects.get(name=tag))
+            except ORMTag.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'Nautobot Tag {tag} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+        new_prefix = ORMPrefix(
+            network=ids["network"].split("/")[0],
+            prefix_length=ids["network"].split("/")[1],
+            namespace=_namespace,
+            type=attrs["prefix_type"] if attrs["prefix_type"] else "network",
+            status=_status,
+            role=_role,
+            rir=_rir,
+            tenant=_tenant,
+            date_allocated=attrs["date_allocated"],
+            description=attrs["description"],
+            vlan=_vlan,
+        )
+        new_prefix.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        new_prefix.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        new_prefix.validated_save()
+        for _tag in _tags:
+            new_prefix.tags.add(_tag)
+        _locations = []
+        for _location in attrs.get("locations") or []:
+            try:
+                _locations.append(ORMLocation.objects.get(name=_location))
+            except ORMLocation.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'Nautobot Location {_location} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+        for _location in _locations:
+            new_prefix.locations.add(_location)
+        for _vrf in attrs.get("vrfs") or []:
+            try:
+                _vrf_name, _vrf_namespace = _vrf.split("__")
+                _namespace = ORMNamespace.objects.get(name=_vrf_namespace)
+                _vrf_obj = ORMVRF.objects.get(name=_vrf_name, namespace=_namespace)
+                adapter.job.logger.debug(f"Assigning VRF {_vrf_obj} to Prefix {new_prefix}")
+                new_prefix.vrfs.add(_vrf_obj)
+            except ORMNamespace.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'Nautobot Namespace {_vrf_namespace} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+            except ORMVRF.DoesNotExist:
+                adapter.job.logger.warning(
+                    f'Nautobot VRF {_vrf} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                )
+
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Prefix in Nautobot from NautobotPrefix object."""
+        self.adapter.job.logger.info(f"Updating Nautobot Prefix: {self.network} in Namespace: {self.namespace}")
+        _update_prefix = ORMPrefix.objects.get(id=self.uuid)
+        if "prefix_type" in attrs:
+            _update_prefix.type = attrs["prefix_type"]
+        if "vlan" in attrs:
+            try:
+                if attrs["vlan"]:
+                    _vlan_name, _vlan_id, _vlan_group_name = attrs["vlan"].split("__", 2)
+                    _vlan_group = ORMVLANGroup.objects.get(name=_vlan_group_name)
+                    _vlan = ORMVLAN.objects.get(
+                        name=_vlan_name,
+                        vid=_vlan_id,
+                        vlan_group=_vlan_group,
+                    )
+                else:
+                    _vlan = None
+            except ORMVLANGroup.DoesNotExist:
+                _vlan = None
+                if attrs["vlan"]:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot VLANGroup {attrs["vlan"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            except ORMVLAN.DoesNotExist:
+                _vlan = None
+                if attrs["vlan"]:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot VLAN {attrs["vlan"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            _update_prefix.vlan = _vlan
+        if "status" in attrs:
+            try:
+                _status = ORMStatus.objects.get(name=attrs["status"])
+            except ORMStatus.DoesNotExist:
+                _status = ORMStatus.objects.get(name="Active")
+                self.adapter.job.logger.warning(
+                    f'Nautobot Status {attrs["status"]} does not exist. Make sure it is created manually or defined in global_settings.yaml. Defaulting to Status Active.'
+                )
+            _update_prefix.status = _status
+        if "role" in attrs:
+            try:
+                _role = ORMRole.objects.get(name=attrs["role"])
+            except ORMRole.DoesNotExist:
+                _role = None
+                if attrs["role"]:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot Role {attrs["role"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            _update_prefix.role = _role
+        if "tenant" in attrs:
+            try:
+                _tenant = ORMTenant.objects.get(name=attrs["tenant"])
+            except ORMTenant.DoesNotExist:
+                _tenant = None
+                if attrs["tenant"]:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot Tenant {attrs["tenant"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            _update_prefix.tenant = _tenant
+        if "rir" in attrs:
+            try:
+                _rir = ORMRiR.objects.get(name=attrs["rir"])
+            except ORMRiR.DoesNotExist:
+                _rir = None
+                if attrs["rir"]:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot RiR {attrs["rir"]} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            _update_prefix.rir = _rir
+        if "tags" in attrs:
+            _tags = []
+            for tag in attrs["tags"]:
+                try:
+                    _tags.append(ORMTag.objects.get(name=tag))
+                except ORMTag.DoesNotExist:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot Tag {tag} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            _update_prefix.tags.clear()
+            for _tag in _tags:
+                _update_prefix.tags.add(_tag)
+        if "date_allocated" in attrs:
+            _update_prefix.date_allocated = attrs["date_allocated"]
+        if "locations" in attrs:
+            _update_prefix.locations.clear()
+            _locations = []
+            for _location in attrs["locations"] or []:
+                try:
+                    _locations.append(ORMLocation.objects.get(name=_location))
+                except ORMLocation.DoesNotExist:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot Location {_location} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+            for _location in _locations:
+                _update_prefix.locations.add(_location)
+        if "vrfs" in attrs:
+            _update_prefix.vrfs.clear()
+            for _vrf in attrs["vrfs"] or []:
+                try:
+                    _vrf_name, _vrf_namespace = _vrf.split("__")
+                    _namespace = ORMNamespace.objects.get(name=_vrf_namespace)
+                    _vrf_obj = ORMVRF.objects.get(name=_vrf_name, namespace=_namespace)
+                    self.adapter.job.logger.debug(f"Assigning VRF {_vrf_obj} to Prefix {_update_prefix}")
+                    _update_prefix.vrfs.add(_vrf_obj)
+                except ORMNamespace.DoesNotExist:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot Namespace {_vrf_namespace} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+                except ORMVRF.DoesNotExist:
+                    self.adapter.job.logger.warning(
+                        f'Nautobot VRF {_vrf} does not exist. Make sure it is created manually or defined in global_settings.yaml'
+                    )
+        if not check_sor_field(_update_prefix):
+            _update_prefix.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_prefix.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_prefix.validated_save()
+
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Prefix in Nautobot from NautobotPrefix object."""
+        self.adapter.job.logger.debug(f"Delete Nautobot Prefix {self.uuid}")
+        try:
+            _prefix = ORMPrefix.objects.get(id=self.uuid)
+            super().delete()
+            _prefix.delete()
+            return self
+        except ORMPrefix.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find Prefix {self.uuid} for deletion. {err}")
+        except ProtectedError as err:
+            self.adapter.job.logger.warning(
+                f"Unable to delete Prefix {self.network} due to existing references. Error: {err}."
+            )
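+
+
+# Secrets are stored by reference only: "parameters" holds lookup details such as an
+# environment-variable name, never the secret value itself, which Nautobot resolves at access
+# time through the configured provider.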
+
+
+class NautobotSecret(Secret):
+    """Nautobot implementation of Bootstrap Secret model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Secret in Nautobot from NautobotSecret object."""
+        adapter.job.logger.info(f'Creating Nautobot Secret: {ids["name"]}')
+        new_secret = ORMSecret(name=ids["name"], provider=attrs["provider"], parameters=attrs["parameters"])
+        new_secret.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        new_secret.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        new_secret.validated_save()
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Secret in Nautobot from NautobotSecret object."""
+        _update_secret = ORMSecret.objects.get(id=self.uuid)
+        if "provider" in attrs:
+            _update_secret.provider = attrs["provider"]
+        if "parameters" in attrs:
+            _update_secret.parameters["variable"] = attrs["parameters"]["variable"]
+        _update_secret.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        if not check_sor_field(_update_secret):
+            _update_secret.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_secret.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Secret in Nautobot from NautobotSecret object."""
+        self.adapter.job.logger.debug(f"Delete secret uuid: {self.uuid}")
+        try:
+            secr = ORMSecret.objects.get(id=self.uuid)
+            super().delete()
+            secr.delete()
+            return self
+        except ORMSecret.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find Secret {self.uuid} for deletion. {err}")
+
+
+class NautobotSecretsGroup(SecretsGroup):
+    """Nautobot implementation of Bootstrap SecretsGroup model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create SecretsGroup in Nautobot from NautobotSecretsGroup object."""
+        adapter.job.logger.info(f'Creating Nautobot Secrets Group: {ids["name"]}')
+        _new_secrets_group = ORMSecretsGroup(name=ids["name"])
+        _new_secrets_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _new_secrets_group.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _new_secrets_group.validated_save()
+
+        # Associate the listed Secrets before returning, so this block is actually reachable.
+        _group = ORMSecretsGroup.objects.get(name=ids["name"])
+        for _secret in attrs["secrets"]:
+            try:
+                _orm_secret = ORMSecret.objects.get(name=_secret["name"])
+            except ORMSecret.DoesNotExist:
+                adapter.job.logger.info(f'Secret {_secret["name"]} does not exist in Nautobot, ensure it is created.')
+                continue
+            try:
+                _group.secrets.get(name=_secret["name"])
+            except ORMSecret.DoesNotExist:
+                _group.secrets.add(_orm_secret)
+                _group.validated_save()
+                _sga = _group.secretsgroupassociation_set.get(secret_id=_orm_secret.id)
+                _sga.access_type = _secret["access_type"]
+                _sga.secret_type = _secret["secret_type"]
+                _sga.validated_save()
+
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update SecretsGroup in Nautobot from NautobotSecretsGroup object."""
+        self.adapter.job.logger.info(f"Updating SecretsGroup {self.name}")
+        _update_group = ORMSecretsGroup.objects.get(name=self.name)
+        if "secrets" in attrs:
+            for _secret in attrs["secrets"]:
+                try:
+                    _orm_secret = ORMSecret.objects.get(name=_secret["name"])
+                except ORMSecret.DoesNotExist:
+                    self.adapter.job.logger.info(
+                        f'Secret {_secret["name"]} does not exist in Nautobot, ensure it is created.'
+                    )
+                    continue
+                try:
+                    _update_group.secrets.get(name=_secret["name"])
+                except ORMSecret.DoesNotExist:
+                    _sga = ORMSecretsGroupAssociation(
+                        secrets_group=_update_group,
+                        secret=_orm_secret,
+                        access_type=_secret["access_type"],
+                        secret_type=_secret["secret_type"],
+                    )
+                    _sga.validated_save()
+        if not check_sor_field(_update_group):
+            _update_group.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_group.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete SecretsGroup in Nautobot from NautobotSecretsGroup object."""
+        self.adapter.job.logger.debug(f"Delete SecretsGroup uuid: {self.uuid}")
+        try:
+            secr = ORMSecretsGroup.objects.get(id=self.uuid)
+            super().delete()
+            secr.delete()
+            return self
+        except ORMSecretsGroup.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find SecretsGroup {self.uuid} for deletion. {err}")
+
+
+class NautobotGitRepository(GitRepository):
+    """Nautobot implementation of Bootstrap GitRepository model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create GitRepository in Nautobot from NautobotGitRepository object."""
+        adapter.job.logger.info(f'Creating Nautobot Git Repository: {ids["name"]}')
+        _secrets_group = None
+        if attrs.get("secrets_group"):
+            _secrets_group = ORMSecretsGroup.objects.get(name=attrs["secrets_group"])
+        new_gitrepository = ORMGitRepository(
+            name=ids["name"],
+            remote_url=attrs["url"],
+            branch=attrs["branch"],
+            secrets_group=_secrets_group,
+            provided_contents=attrs["provided_contents"],
+        )
+        new_gitrepository.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        new_gitrepository.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        new_gitrepository.validated_save()
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update GitRepository in Nautobot from NautobotGitRepository object."""
+        self.adapter.job.logger.info(f"Updating GitRepository {self.name}")
+        _update_git_repo = ORMGitRepository.objects.get(name=self.name)
+        if attrs.get("url"):
+            _update_git_repo.remote_url = attrs["url"]
+        if attrs.get("branch"):
+            _update_git_repo.branch = attrs["branch"]
+        if attrs.get("secrets_group"):
+            _secrets_group = ORMSecretsGroup.objects.get(name=attrs["secrets_group"])
+            _update_git_repo.secrets_group = _secrets_group
+        if attrs.get("provided_contents"):
+            _update_git_repo.provided_contents = attrs["provided_contents"]
+        if not check_sor_field(_update_git_repo):
+            _update_git_repo.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_git_repo.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_git_repo.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete GitRepository in Nautobot from NautobotGitRepository object."""
+        self.adapter.job.logger.debug(f"Delete GitRepository uuid: {self.uuid}")
+        try:
+            git_repo = ORMGitRepository.objects.get(id=self.uuid)
+            super().delete()
+            git_repo.delete()
+            return self
+        except ORMGitRepository.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find GitRepository {self.uuid} for deletion. {err}")
+
+
+class NautobotDynamicGroup(DynamicGroup):
+    """Nautobot implementation of Bootstrap DynamicGroup model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create DynamicGroup in Nautobot from NautobotDynamicGroup object."""
+        adapter.job.logger.info(f'Creating Nautobot Dynamic Group: {ids["name"]}')
+        _content_type_id = lookup_content_type_id(nb_model="dynamic_groups", model_path=ids["content_type"])
+        if _content_type_id is None:
+            adapter.job.logger.warning(
+                f'Could not find ContentType for {ids["name"]} with ContentType {ids["content_type"]}'
+            )
+        _content_type = ContentType.objects.get_for_id(id=_content_type_id)
+        _new_nb_dg = ORMDynamicGroup(
+            name=ids["name"],
+            content_type=_content_type,
+            filter=attrs["dynamic_filter"],
+            description=attrs["description"],
+        )
+        _new_nb_dg.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _new_nb_dg.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+
+        try:
+            _new_nb_dg.validated_save()
+        except ValidationError:
+            if attrs.get("dynamic_filter"):
+                _new_nb_dg.filter = attrs["dynamic_filter"]
+            if attrs.get("description"):
+                _new_nb_dg.description = attrs["description"]
+            _new_nb_dg.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+            _new_nb_dg.validated_save()
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update DynamicGroups in Nautobot from NautobotDynamicGroup object."""
+        self.adapter.job.logger.info(f"Updating DynamicGroup {self.name}")
+        _update_dyn_group = ORMDynamicGroup.objects.get(name=self.name)
+        if attrs.get("dynamic_filter"):
+            _update_dyn_group.filter = attrs["dynamic_filter"]
+        if attrs.get("description"):
+            _update_dyn_group.description = attrs["description"]
+        if not check_sor_field(_update_dyn_group):
+            _update_dyn_group.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+        _update_dyn_group.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+        _update_dyn_group.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete DynamicGroup in Nautobot from NautobotDynamicGroup object."""
+        self.adapter.job.logger.debug(f"Delete DynamicGroup {self.name}")
+        try:
+            dyn_group = ORMDynamicGroup.objects.get(name=self.name)
+            super().delete()
+            dyn_group.delete()
+            return self
+        except ORMDynamicGroup.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find DynamicGroup {self.name} for deletion. {err}")
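+
+# NOTE: a DynamicGroup "filter" is plain JSON validated against the content
+# type's FilterSet; e.g. fixtures/global_settings.yml ships
+#   {"tenant": ["Backbone"]}
+# for a dcim.device group.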
+
+
+class NautobotComputedField(ComputedField):
+    """Nautobot implementation of Bootstrap ComputedField model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create ComputedField in Nautobot from NautobotComputedField object."""
+        adapter.job.logger.info(f'Creating Nautobot Computed Field: {ids["label"]}')
+        _content_type_id = lookup_content_type_id(nb_model="custom_fields", model_path=attrs["content_type"])
+        if _content_type_id is None:
+            adapter.job.logger.warning(
+                f'Could not find ContentType for {ids["label"]} with ContentType {attrs["content_type"]}'
+            )
+        _content_type = ContentType.objects.get_for_id(id=_content_type_id)
+        _new_computed_field = ORMComputedField(
+            label=ids["label"], content_type=_content_type, template=attrs["template"]
+        )
+        _new_computed_field.validated_save()
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update ComputedField in Nautobot from NautobotComputedField object."""
+        self.adapter.job.logger.info(f"Updating ComputedField {self.label}")
+        comp_field = ORMComputedField.objects.get(label=self.label)
+        if attrs.get("content_type"):
+            _content_type_id = lookup_content_type_id(nb_model="custom_fields", model_path=attrs["content_type"])
+            if _content_type_id is None:
+                self.adapter.job.logger.warning(
+                    f'Could not find ContentType for {self.label} with ContentType {attrs["content_type"]}'
+                )
+            _content_type = ContentType.objects.get_for_id(id=_content_type_id)
+            comp_field.content_type = _content_type
+        if attrs.get("template"):
+            comp_field.template = attrs["template"]
+        comp_field.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete ComputedField in Nautobot from NautobotComputedField object."""
+        self.adapter.job.logger.debug(f"Delete ComputedField: {self.label}")
+        try:
+            comp_field = ORMComputedField.objects.get(label=self.label)
+            super().delete()
+            comp_field.delete()
+            return self
+        except ORMComputedField.DoesNotExist as err:
+            self.adapter.job.logger.warning(f"Unable to find ComputedField {self.label} for deletion. 
{err}") + + +class NautobotTag(Tag): + """Nautobot implementation of Bootstrap Tag model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create Tag in Nautobot from NautobotTag object.""" + _content_types = [] + adapter.job.logger.info(f'Creating Nautobot Tag: {ids["name"]}') + for _model in attrs["content_types"]: + adapter.job.logger.debug(f"Looking up {_model} in content types.") + _content_types.append(lookup_content_type_for_taggable_model_path(_model)) + _new_tag = ORMTag( + name=ids["name"], + color=attrs["color"], + description=attrs["description"], + ) + _new_tag.validated_save() + _new_tag.content_types.set(_content_types) + _new_tag.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_tag.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_tag.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update Tag in Nautobot from NautobotTag object.""" + self.adapter.job.logger.info(f"Updating Tag {self.name}") + _update_tag = ORMTag.objects.get(name=self.name) + if attrs.get("color"): + _update_tag.color = attrs["color"] + if attrs.get("content_types"): + _content_types = [] + for _model in attrs["content_types"]: + self.adapter.job.logger.debug(f"Looking up {_model} in content types.") + _content_types.append(lookup_content_type_for_taggable_model_path(_model)) + _update_tag.content_types.set(_content_types) + if attrs.get("description"): + _update_tag.description = attrs["description"] + if not check_sor_field(_update_tag): + _update_tag.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _update_tag.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_tag.validated_save() + return super().update(attrs) + + def delete(self): + """Delete Tag in Nautobot from NautobotTag object.""" + self.adapter.job.logger.debug(f"Delete Tag: {self.name}") + try: + _tag = ORMTag.objects.get(name=self.name) + super().delete() + _tag.delete() + return self + except ORMTag.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find Tag {self.name} for deletion. {err}") + + +class NautobotGraphQLQuery(GraphQLQuery): + """Nautobot implementation of Bootstrap GraphQLQuery model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create GraphQLQuery in Nautobot from NautobotGraphQLQuery object.""" + adapter.job.logger.info(f'Creating Nautobot GraphQLQuery: {ids["name"]}') + _new_query = ORMGraphQLQuery(name=ids["name"], query=attrs["query"]) + _new_query.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update GraphQLQuery in Nautobot from NautobotGraphQLQuery object.""" + self.adapter.job.logger.info(f"Updating GraphQLQuery: {self.name}.") + _query = ORMGraphQLQuery.objects.get(name=self.name) + if attrs.get("query"): + _query.query = attrs["query"] + _query.validated_save() + return super().update(attrs) + + def delete(self): + """Delete GraphQLQuery in Nautobot from NautobotGraphQLQuery object.""" + self.adapter.job.logger.debug(f"Delete GraphQLQuery: {self.name}") + try: + _query = ORMGraphQLQuery.objects.get(name=self.name) + super().delete() + _query.delete() + return self + except ORMGraphQLQuery.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find GraphQLQuery {self.name} for deletion. 
{err}")
+
+
+if LIFECYCLE_MGMT:
+
+    class NautobotSoftware(Software):
+        """Nautobot implementation of Bootstrap Software model."""
+
+        @classmethod
+        def create(cls, adapter, ids, attrs):
+            """Create Software in Nautobot from NautobotSoftware object."""
+            adapter.job.logger.info(f'Creating Nautobot Software object {ids["platform"]} - {ids["version"]}.')
+            _platform = ORMPlatform.objects.get(name=ids["platform"])
+            _new_software = ORMSoftware(
+                version=ids["version"],
+                alias=attrs["alias"],
+                device_platform=_platform,
+                end_of_support=attrs["eos_date"],
+                long_term_support=attrs["long_term_support"],
+                pre_release=attrs["pre_release"],
+                documentation_url=attrs["documentation_url"],
+            )
+            if attrs.get("tags"):
+                _new_software.validated_save()
+                _new_software.tags.clear()
+                for tag in attrs["tags"]:
+                    _new_software.tags.add(ORMTag.objects.get(name=tag))
+            _new_software.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")})
+            _new_software.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+            _new_software.validated_save()
+            return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+        def update(self, attrs):
+            """Update Software in Nautobot from NautobotSoftware object."""
+            self.adapter.job.logger.info(f"Updating Software: {self.platform} - {self.version}.")
+            _platform = ORMPlatform.objects.get(name=self.platform)
+            _update_software = ORMSoftware.objects.get(version=self.version, device_platform=_platform)
+            if "alias" in attrs:
+                _update_software.alias = attrs["alias"]
+            if attrs.get("release_date"):
+                _update_software.release_date = attrs["release_date"]
+            if attrs.get("eos_date"):
+                _update_software.end_of_support = attrs["eos_date"]
+            if attrs.get("long_term_support"):
+                _update_software.long_term_support = attrs["long_term_support"]
+            if attrs.get("pre_release"):
+                _update_software.pre_release = attrs["pre_release"]
+            if attrs.get("documentation_url"):
+                _update_software.documentation_url = attrs["documentation_url"]
+            elif attrs.get("documentation_url") == "":
+                _update_software.documentation_url = ""
+            if attrs.get("tags"):
+                _update_software.tags.clear()
+                for tag in attrs["tags"]:
+                    _update_software.tags.add(ORMTag.objects.get(name=tag))
+            if not check_sor_field(_update_software):
+                _update_software.custom_field_data.update(
+                    {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+                )
+            _update_software.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()})
+            _update_software.validated_save()
+            return super().update(attrs)
+
+        def delete(self):
+            """Delete Software in Nautobot from NautobotSoftware object."""
+            try:
+                _platform = ORMPlatform.objects.get(name=self.platform)
+                _software = ORMSoftware.objects.get(version=self.version, device_platform=_platform)
+                super().delete()
+                _software.delete()
+                return self
+            except ORMSoftware.DoesNotExist as err:
+                self.adapter.job.logger.warning(
+                    f"Unable to find Software {self.platform} - {self.version} for deletion. 
{err}" + ) + + class NautobotSoftwareImage(SoftwareImage): + """Nautobot implementation of Bootstrap SoftwareImage model.""" + + @classmethod + def create(cls, adapter, ids, attrs): + """Create SoftwareImage in Nautobot from NautobotSoftwareImage object.""" + _tags = [] + if attrs["tags"] is not None: + for tag in attrs["tags"]: + _tags.append(ORMTag.objects.get(name=tag)) + _platform = ORMPlatform.objects.get(name=attrs["platform"]) + _software = ORMSoftware.objects.get(version=attrs["software_version"], device_platform=_platform) + _new_soft_image = ORMSoftwareImage( + software=_software, + image_file_name=attrs["file_name"], + image_file_checksum=attrs["image_file_checksum"], + hashing_algorithm=attrs["hashing_algorithm"], + download_url=attrs["download_url"], + default_image=attrs["default_image"], + ) + if attrs.get("tags"): + _new_soft_image.validated_save() + _new_soft_image.tags.clear() + for tag in attrs["tags"]: + _new_soft_image.tags.add(ORMTag.objects.get(name=tag)) + _new_soft_image.custom_field_data.update({"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}) + _new_soft_image.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _new_soft_image.validated_save() + return super().create(adapter=adapter, ids=ids, attrs=attrs) + + def update(self, attrs): + """Update SoftwareImage in Nautobot from NautobotSoftwareImage object.""" + self.adapter.job.logger.info(f"Updating Software Image: {self.platform} - {self.software_version}.") + _platform = ORMPlatform.objects.get(name=self.platform) + _software = ORMSoftware.objects.get(version=self.software_version, device_platform=_platform) + _update_soft_image = ORMSoftwareImage.objects.get(software=_software) + if attrs.get("platform"): + _update_soft_image.platform = _platform + if attrs.get("software_version"): + _update_soft_image.software_version = attrs["software_version"] + if attrs.get("file_name"): + _update_soft_image.image_file_name = attrs["file_name"] + if attrs.get("image_file_checksum"): + _update_soft_image.image_file_checksum = attrs["image_file_checksum"] + if attrs.get("hashing_algorithm"): + _update_soft_image.hashing_algorithm = attrs["hashing_algorithm"] + if attrs.get("download_url"): + _update_soft_image.download_url = attrs["download_url"] + if attrs.get("default_image"): + _update_soft_image.default_image = attrs["default_image"] + if attrs.get("tags"): + _update_soft_image.tags.clear() + if attrs["tags"] is not None: + for tag in attrs["tags"]: + _update_soft_image.tags.add(ORMTag.objects.get(name=tag)) + if not check_sor_field(_update_soft_image): + _update_soft_image.custom_field_data.update( + {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")} + ) + _update_soft_image.custom_field_data.update({"last_synced_from_sor": datetime.today().date().isoformat()}) + _update_soft_image.validated_save() + return super().update(attrs) + + def delete(self): + """Delete SoftwareImage in Nautobot from NautobotSoftwareImage object.""" + try: + _platform = ORMPlatform.objects.get(name=self.platform) + _software = ORMSoftware.objects.get(version=self.software_version, device_platform=_platform) + _soft_image = ORMSoftwareImage.objects.get(software=_software) + super().delete() + _soft_image.delete() + return self + except ORMSoftwareImage.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find SoftwareImage {self.software} for deletion. 
{err}")
+
+    class NautobotValidatedSoftware(ValidatedSoftware):
+        """Nautobot implementation of Bootstrap ValidatedSoftware model."""
+
+        @classmethod
+        def create(cls, adapter, ids, attrs):
+            """Create ValidatedSoftware in Nautobot from NautobotValidatedSoftware object."""
+            _devices = []  # noqa: F841
+            _device_types = []  # noqa: F841
+            _device_roles = []  # noqa: F841
+            _inventory_items = []  # noqa: F841
+            _object_tags = []  # noqa: F841
+            _platform = ORMPlatform.objects.get(name=attrs["platform"])
+            _software = ORMSoftware.objects.get(version=attrs["software_version"], device_platform=_platform)
+            _new_validated_software = ORMValidatedSoftware(
+                software=_software,
+                start=ids["valid_since"] if ids["valid_since"] is not None else datetime.today().date(),
+                end=ids["valid_until"],
+                preferred=attrs["preferred_version"],
+            )
+            _new_validated_software.custom_field_data.update(
+                {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")}
+            )
+            _new_validated_software.custom_field_data.update(
+                {"last_synced_from_sor": datetime.today().date().isoformat()}
+            )
+            _new_validated_software.validated_save()
+            if "devices" in attrs:
+                if attrs["devices"]:
+                    for _dev in attrs["devices"]:
+                        _devices.append(ORMDevice.objects.get(name=_dev))
+                    _new_validated_software.devices.set(_devices)
+            if "device_types" in attrs:
+                if attrs["device_types"]:
+                    for _dev_type in attrs["device_types"]:
+                        _device_types.append(ORMDeviceType.objects.get(model=_dev_type))
+                    _new_validated_software.device_types.set(_device_types)
+            if "device_roles" in attrs:
+                if attrs["device_roles"]:
+                    for _dev_role in attrs["device_roles"]:
+                        _device_roles.append(ORMRole.objects.get(name=_dev_role))
+                    _new_validated_software.device_roles.set(_device_roles)
+            if "inventory_items" in attrs:
+                if attrs["inventory_items"]:
+                    for _inv_item in attrs["inventory_items"]:
+                        _inventory_items.append(ORMInventoryItem.objects.get(name=_inv_item))
+                    _new_validated_software.inventory_items.set(_inventory_items)
+            if "object_tags" in attrs:
+                if attrs["object_tags"]:
+                    for _obj_tag in attrs["object_tags"]:
+                        _object_tags.append(ORMTag.objects.get(name=_obj_tag))
+                    _new_validated_software.object_tags.set(_object_tags)
+            if "tags" in attrs:
+                if attrs["tags"] is not None:
+                    for _tag in attrs["tags"]:
+                        _new_validated_software.tags.add(ORMTag.objects.get(name=_tag))
+            _new_validated_software.validated_save()
+            return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+        def update(self, attrs):
+            """Update ValidatedSoftware in Nautobot from NautobotValidatedSoftware object."""
+            self.adapter.job.logger.info(f"Updating Validated Software - {self} with attrs {attrs}.")
+            _tags = []  # noqa: F841
+            _devices = []  # noqa: F841
+            _device_types = []  # noqa: F841
+            _device_roles = []  # noqa: F841
+            _inventory_items = []  # noqa: F841
+            _object_tags = []  # noqa: F841
+            _platform = ORMPlatform.objects.get(name=self.platform)
+            _software = ORMSoftware.objects.get(version=self.software_version, device_platform=_platform)
+            _update_validated_software = ORMValidatedSoftware.objects.get(
+                software=_software, start=self.valid_since, end=self.valid_until
+            )
+            if attrs.get("preferred_version"):
+                _update_validated_software.preferred = attrs["preferred_version"]
+            if "tags" in attrs:
+                _update_validated_software.tags.clear()
+                if attrs["tags"] is not None:
+                    for _tag in attrs["tags"]:
+                        _update_validated_software.tags.add(ORMTag.objects.get(name=_tag))
+            if "devices" in attrs:
+                # FIXME: There might be a better way to handle this that's easier on the database.
+ _update_validated_software.devices.clear() + if attrs["devices"]: + for _dev in attrs["devices"]: + _devices.append(ORMDevice.objects.get(name=_dev)) + _update_validated_software.devices.set(_devices) + if "device_types" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_validated_software.device_types.clear() + if attrs["device_types"]: + for _dev_type in attrs["device_types"]: + _device_types.append(ORMDeviceType.objects.get(model=_dev_type)) + _update_validated_software.device_types.set(_device_types) + if "device_roles" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_validated_software.device_roles.clear() + if attrs["device_roles"]: + for _dev_role in attrs["device_roles"]: + _device_roles.append(ORMRole.objects.get(name=_dev_role)) + _update_validated_software.device_roles.set(_device_roles) + if "inventory_items" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_validated_software.inventory_items.clear() + if attrs["inventory_items"]: + for _inv_item in attrs["inventory_items"]: + _inventory_items.append(ORMInventoryItem.objects.get(name=_inv_item)) + _update_validated_software.inventory_items.set(_inventory_items) + if "object_tags" in attrs: + # FIXME: There might be a better way to handle this that's easier on the database. + _update_validated_software.object_tags.clear() + if attrs["object_tags"]: + for _obj_tag in attrs["object_tags"]: + _object_tags.append(ORMTag.objects.get(name=_obj_tag)) + _update_validated_software.object_tags.set(_object_tags) + if not check_sor_field(_update_validated_software): + _update_validated_software.custom_field_data.update( + {"system_of_record": os.getenv("SYSTEM_OF_RECORD", "Bootstrap")} + ) + _update_validated_software.custom_field_data.update( + {"last_synced_from_sor": datetime.today().date().isoformat()} + ) + _update_validated_software.validated_save() + return super().update(attrs) + + def delete(self): + """Delete ValidatedSoftware in Nautobot from NautobotValidatedSoftware object.""" + try: + _platform = ORMPlatform.objects.get(name=self.platform) + _software = ORMSoftware.objects.get(version=self.software_version, device_platform=_platform) + _validated_software = ORMValidatedSoftware.objects.get( + software=_software, start=self.valid_since, end=self.valid_until + ) + super().delete() + _validated_software.delete() + return self + except ORMValidatedSoftware.DoesNotExist as err: + self.adapter.job.logger.warning(f"Unable to find ValidatedSoftware {self} for deletion. 
{err}") diff --git a/nautobot_ssot/integrations/bootstrap/fixtures/develop.yml b/nautobot_ssot/integrations/bootstrap/fixtures/develop.yml new file mode 100644 index 000000000..893c92c05 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/fixtures/develop.yml @@ -0,0 +1,2 @@ +--- +git_branch: "develop" diff --git a/nautobot_ssot/integrations/bootstrap/fixtures/global_settings.yml b/nautobot_ssot/integrations/bootstrap/fixtures/global_settings.yml new file mode 100755 index 000000000..f938033a0 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/fixtures/global_settings.yml @@ -0,0 +1,794 @@ +--- +tenant_group: + - name: "Group1" + parent: "" + description: "" + - name: "Group2" + parent: "" + description: "" + - name: "Group3" + parent: "Group1" + description: "" +tenant: + - name: "Backbone" + tenant_group: "Group1" + description: "" + tags: [] + - name: "Datacenter" + tenant_group: "Group2" + description: "" + tags: ["Test"] +role: + - name: "spine_switches" + weight: + description: "" + color: "795548" + content_types: + - "dcim.device" + - name: "leaf_switches" + weight: + description: "" + color: "785530" + content_types: + - "dcim.device" + - name: "Switch" + weight: + description: "" + color: "9e9e9e" + content_types: + - "dcim.device" + - name: "Firewall" + weight: + description: "" + color: "9e9e9e" + content_types: + - "dcim.device" + - name: "Data Network" + weight: + description: "" + color: "9e9e9e" + content_types: + - "ipam.prefix" + - "ipam.vlan" + # Default Roles + - name: "Administrative" + weight: + description: "Unit plays an administrative role" + color: "2196f3" + content_types: + - "extras.contactassociation" + - name: "Anycast" + weight: + description: "" + color: "ffc107" + content_types: + - "ipam.ipaddress" + - name: "Billing" + weight: + description: "Unit plays a billing role" + color: "4caf50" + content_types: + - "extras.contactassociation" + - name: "CARP" + weight: + description: "" + color: "4caf50" + content_types: + - "ipam.ipaddress" + - name: "GLBP" + weight: + description: "" + color: "4caf50" + content_types: + - "ipam.ipaddress" + - name: "HSRP" + weight: + description: "" + color: "4caf50" + content_types: + - "ipam.ipaddress" + - name: "Loopback" + weight: + description: "" + color: "9e9e9e" + content_types: + - "ipam.ipaddress" + - name: "On Site" + weight: + description: "Unit plays an on site role" + color: "111111" + content_types: + - "extras.contactassociation" + - name: "Secondary" + weight: + description: "" + color: "2196f3" + content_types: + - "ipam.ipaddress" + - name: "Support" + weight: + description: "Unit plays a support role" + color: "ffeb3b" + content_types: + - "extras.contactassociation" + - name: "VIP" + weight: + description: "" + color: "4caf50" + content_types: + - "ipam.ipaddress" + - name: "VRRP" + weight: + description: "" + color: "4caf50" + content_types: + - "ipam.ipaddress" +manufacturer: + - name: "Generic" + description: "For generic devices like patch panels" + - name: "Palo Alto Networks" + description: "" + - name: "Arista" + description: "" + - name: "Cisco" + description: "" +platform: + - name: "paloalto_panos" + manufacturer: "Palo Alto Networks" + network_driver: "paloalto_panos" + napalm_driver: "" + napalm_arguments: {} + description: "PanOS Firewalls" + - name: "cisco_ios" + manufacturer: "Cisco" + network_driver: "cisco_ios" + napalm_driver: "" + napalm_arguments: {} + description: "Cisco Devices" + - name: "arista_eos" + manufacturer: "Arista" + network_driver: "arista_eos" + napalm_driver: 
"" + napalm_arguments: {} + description: "Arista Devices" +location_type: + - name: "Region" + parent: "" + nestable: true + description: "" + content_types: [] + - name: "Site" + parent: "Region" + nestable: false + description: "" + content_types: + - "dcim.device" + - "ipam.namespace" + - "ipam.prefix" + - "ipam.vlan" + - "ipam.vlangroup" + - "circuits.circuittermination" + - name: "Building" + parent: "Site" + nestable: false + description: "" + content_types: + - "dcim.device" + - "ipam.namespace" + - "ipam.prefix" + - "ipam.vlan" + - "ipam.vlangroup" + - "circuits.circuittermination" +location: + - name: "Southeast" + location_type: "Region" + parent: "" + status: "Active" + facility: "" + asn: + time_zone: "US/Eastern" + description: "" + tenant: "" + physical_address: "" + shipping_address: "" + latitude: + longitude: + contact_name: "" + contact_phone: "" + contact_email: "" + tags: [] + - name: "Atlanta" + location_type: "Site" + parent: "Southeast" + status: "Active" + facility: "AT1" + asn: 65001 + time_zone: "US/Eastern" + description: "" + tenant: "" + physical_address: | + 180 Peachtree St NE + FL 2 , FL 3 , FL 6 + Atlanta, GA 30303 + United States + shipping_address: | + Example Company + 180 Peachtree St NE + Loading Dock 1 + Atlanta, GA 30303 + United States + latitude: + longitude: + contact_name: "" + contact_phone: "" + contact_email: "" + tags: [] + - name: "Atlanta4" + location_type: "Site" + parent: "Southeast" + status: "Active" + facility: "AT4" + asn: 65004 + time_zone: "US/Eastern" + description: "" + tenant: "" + physical_address: | + 450 Interstate to N PKWY + Atlanta, GA 30339 + United States + shipping_address: | + Example Company + 450 Interstate to N PKWY + Loading Dock 1 + Atlanta, GA 30339 + United States + latitude: + longitude: + contact_name: "" + contact_phone: "" + contact_email: "" + tags: [] +team: + - name: "Datacenter" + phone: "123-456-7890" + email: "datacenter@example.com" + address: "2715 N Vermont Canyon Rd, Los Angeles, CA 90027" + # TODO: Need to consider how to allow loading from teams or contacts models. + # contacts: [] + - name: "Backbone" + phone: "123-456-7890" + email: "backbone@example.com" + address: "1600 S Azusa Ave, Rowland Heights, CA 91748" + # TODO: Need to consider how to allow loading from teams or contacts models. 
+ # contacts: [] +contact: + - name: "Jennifer Parker" + phone: "888-555-4823" + email: "jenny@future.com" + address: "12417 Philadelphia St, Whittier, CA 90601" + teams: + - "Backbone" + - "Datacenter" + - name: "Marty McFly" + phone: "888-555-1955" + email: "marty@future.com" + address: "9303 Roslyndale Ave, Arleta, CA 91331" + teams: + - "Backbone" +provider: + - name: "Provider1" + asn: 65000 + account_number: "12345678" + portal_url: "https://provider1.com" + noc_contact: "" + admin_contact: "" + tags: [] + - name: "Provider2" + asn: 65001 + account_number: "87654321" + portal_url: "https://provider2.com" + noc_contact: "" + admin_contact: "" + tags: [] +provider_network: + - name: "Provider1 Metro-E" + provider: "Provider1" + description: "" + comments: "" + tags: [] + - name: "Provider2 Metro-E" + provider: "Provider2" + description: "" + comments: "" + tags: [] +circuit_type: + - name: "Metro-E" + description: "Metro ethernet" + - name: "DWDM" + description: "" + - name: "Internet" + description: "" +circuit: + - circuit_id: "METRO-65002-CUST1" + provider: "Provider1" + circuit_type: "Metro-E" + status: "Active" + date_installed: + commit_rate_kbps: 1000000 + description: "" + tenant: "" + tags: [] + - circuit_id: "INTERNET-65002-CUST1" + provider: "Provider2" + circuit_type: "Internet" + status: "Active" + date_installed: + commit_rate_kbps: 1000000 + description: "" + tenant: "" + tags: [] +circuit_termination: + - name: "METRO-65002-CUST1__Provider1__A" + termination_type: "Location" + location: "Atlanta" + provider_network: "" + port_speed_kbps: 1000000 + upstream_speed_kbps: + cross_connect_id: "" + patch_panel_or_ports: "" + description: "" + tags: [] + - name: "METRO-65002-CUST1__Provider1__Z" + termination_type: "Provider Network" + location: "" + provider_network: "Provider2 Metro-E" + port_speed_kbps: 1000000 + upstream_speed_kbps: + cross_connect_id: "" + patch_panel_or_ports: "" + description: "" + tags: [] + - name: "INTERNET-65002-CUST1__Provider2__A" + termination_type: "Location" + location: "Atlanta4" + provider_network: "" + port_speed_kbps: 1000000 + upstream_speed_kbps: + cross_connect_id: "" + patch_panel_or_ports: "" + description: "" + tags: [] +secret: + - name: "Github_Service_Acct" + provider: "environment-variable" # or text-file + parameters: + variable: "GITHUB_SERVICE_ACCT" + path: + - name: "Github_Service_Token" + provider: "environment-variable" # or text-file + parameters: + variable: "GITHUB_SERVICE_TOKEN" + path: +secrets_group: + - name: "Github_Service_Account" + secrets: + - name: "Github_Service_Acct" + secret_type: "username" + access_type: "HTTP(S)" + - name: "Github_Service_Token" + secret_type: "token" + access_type: "HTTP(S)" +git_repository: + - name: "Backbone Config Contexts" + url: "https://github.com/nautobot/backbone-config-contexts.git" + branch: "main" + secrets_group_name: "Github_Service_Account" + provided_data_type: + - "config contexts" + - name: "Datacenter Config Contexts" + url: "https://github.com/nautobot/datacenter-config-contexts.git" + secrets_group_name: "Github_Service_Account" + provided_data_type: + - "config contexts" + - name: "Metro Config Contexts" + url: "https://github.com/nautobot/metro-config-contexts.git" + secrets_group_name: + provided_data_type: + - "config contexts" + - name: "Access Config Contexts" + url: "https://github.com/nautobot/access-config-contexts.git" + secrets_group_name: + provided_data_type: + - "config contexts" +dynamic_group: + - name: "Backbone Domain" + content_type: "dcim.device" 
+ description: "" + filter: | + { + "tenant": [ + "Backbone" + ] + } + - name: "Datacenter" + content_type: "dcim.device" + description: "" + filter: | + { + "location": [ + "Atlanta" + ], + "platform": [ + "arista_eos", + "paloalto_panos" + ] + } +computed_field: + - label: "Compliance Change" + content_type: "dcim.device" + template: "{{ obj | get_change_log }}" +tag: + - name: "Backbone" + color: "795547" + description: "" + content_types: + - "dcim.device" + - name: "Access" + color: "795548" + description: "" + content_types: + - "dcim.device" + - "ipam.ipaddress" + - name: "Test" + color: "795548" + description: "Test" + content_types: + - "circuits.circuit" + - "circuits.circuittermination" + - "circuits.provider" + - "circuits.providernetwork" + - "dcim.cable" + - "dcim.consoleport" + - "dcim.consoleserverport" + - "dcim.device" + - "dcim.devicebay" + - "dcim.devicetype" + - "dcim.frontport" + - "dcim.interface" + - "dcim.inventoryitem" + - "dcim.powerfeed" + - "dcim.poweroutlet" + - "dcim.powerpanel" + - "dcim.powerport" + - "dcim.rack" + - "dcim.rackreservation" + - "dcim.rearport" + - "dcim.location" + - "dcim.deviceredundancygroup" + - "extras.gitrepository" + - "extras.job" + - "extras.secret" + - "ipam.namespace" + - "ipam.ipaddress" + - "ipam.prefix" + - "ipam.routetarget" + - "ipam.service" + - "ipam.vlan" + - "ipam.vrf" + - "tenancy.tenant" + - "virtualization.cluster" + - "virtualization.virtualmachine" + - "virtualization.vminterface" +graph_ql_query: + - name: "Backbone Devices" + query: | + query ($device_id: ID!) { + device(id: $device_id) { + config_context + hostname: name + device_role { + name + } + tenant { + name + } + primary_ip4 { + address + } + } + } + - name: "Datacenter Devices" + query: | + query ($device_id: ID!) { + device(id: $device_id) { + config_context + hostname: name + device_role { + name + } + tenant { + name + } + primary_ip4 { + address + } + } + } +software: + - device_platform: "arista_eos" + version: "4.25.10M" + alias: "" + release_date: "2023-12-04" + eos_date: "2023-12-05" + documentation_url: "https://arista.com" + lts: false + pre_release: false + tags: ["Backbone"] + - device_platform: "cisco_ios" + version: "03.11.04.E" + alias: "Cisco Validated" + release_date: + eos_date: "2023-12-04" + documentation_url: "" + lts: false + pre_release: false + tags: ["Test"] + - device_platform: "paloalto_panos" + version: "11.0.3" + alias: "Panos Preferred" + release_date: + eos_date: "2024-12-04" + documentation_url: "https://paloaltonetworks.com" + lts: false + pre_release: false + tags: ["Test"] + - device_platform: "arista_eos" + version: "15.4.3" + alias: "Arista Preferred" + release_date: + eos_date: "2024-12-04" + documentation_url: "https://arista.com" + lts: false + pre_release: false + tags: ["Test"] +software_image: + - software: "arista_eos - 15.4.3" + platform: "arista_eos" + software_version: "15.4.3" + file_name: "arista15.4.3.bin" + download_url: "https://arista-files.com" + image_file_checksum: "" + hashing_algorithm: "" + default_image: false + tags: ["Test"] + - software: "paloalto_panos - 11.0.3" + platform: "paloalto_panos" + software_version: "11.0.3" + file_name: "paloalto_11.0.3.bin" + download_url: "https://paloaltonetworks.com" + image_file_checksum: "o234i09usdfsflkj" + hashing_algorithm: "SHA256" + default_image: false + tags: [] +validated_software: + - software: "arista_eos - 4.25.10M" + valid_since: 2023-08-07 + valid_until: 2025-01-01 + preferred_version: false + devices: [] + device_types: [] + device_roles: [] + 
inventory_items: [] + object_tags: [] + tags: ["Test"] + - software: "cisco_ios - 03.11.04.E" + valid_since: 2023-08-07 + valid_until: + preferred_version: false + devices: [] + device_types: [] + device_roles: [] + inventory_items: [] + object_tags: [] + tags: [] + - software: "paloalto_panos - 11.0.3" + valid_since: 2023-08-07 + valid_until: + preferred_version: false + devices: [] + device_types: [] + device_roles: ["Firewall"] + inventory_items: [] + object_tags: [] + tags: [] + - software: "arista_eos - 15.4.3" + valid_since: 2023-08-07 + valid_until: 2025-08-09 + preferred_version: true + devices: [] + device_types: [] + device_roles: [] + inventory_items: [] + object_tags: ["Backbone"] + tags: ["Test"] + - software: "arista_eos - 15.4.3" + valid_since: 2023-08-07 + valid_until: + preferred_version: true + devices: [] + device_types: [] + device_roles: [] + inventory_items: [] + object_tags: [] + tags: [] +namespace: + - name: "Global" + description: "" + location: "" + - name: "Customer1" + description: "Customer1 IPAM Namespace" + location: "Atlanta" + - name: "Customer2" + description: "Customer2 IPAM Namespace" + location: "Atlanta4" +rir: + - name: "RFC1918" + private: true + description: "Private IP Space" + - name: "ARIN" + private: false + description: "American Registry for Internet Numbers" +vlan_group: + - name: "Atlanta VLANs" + location: "Atlanta" + description: "" + - name: "Atlanta4 VLANs" + location: "Atlanta4" + description: "" +vlan: + - name: "vlan10" + vid: 10 + description: "" + status: "Active" + role: "Data Network" + locations: ["Atlanta"] + vlan_group: "Atlanta VLANs" + tenant: "" + tags: [] + - name: "vlan20" + vid: 20 + description: "" + status: "Reserved" + role: "Data Network" + locations: ["Atlanta", "Atlanta4"] + vlan_group: "Atlanta VLANs" + tenant: "" + tags: [] + - name: "vlan30" + vid: 30 + description: "" + status: "Reserved" + role: "Data Network" + locations: [] + vlan_group: "Atlanta VLANs" + tenant: "" + tags: [] + - name: "vlan30" + vid: 30 + description: "" + status: "Active" + role: "" + locations: [] + vlan_group: "" + tenant: "" + tags: [] +vrf: + - name: "blue" + namespace: "Global" + route_distinguisher: "65000:1" + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + # prefixes: [] + tenant: "" + tags: [] + - name: "red" + namespace: "Global" + route_distinguisher: "65000:2" + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + # prefixes: [] + tenant: "" + tags: [] + - name: "blue" + namespace: "Customer1" + route_distinguisher: "65000:1" + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + # prefixes: [] + tenant: "" + tags: [] +prefix: + - network: "10.0.0.0/24" + namespace: "Customer1" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "Data Network" + rir: "RFC1918" + date_allocated: 2024-06-01 + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + vrfs: [] + locations: [] + vlan: "" + tenant: "" + tags: [] + - network: "10.0.0.0/24" + namespace: "Customer2" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "Data Network" + rir: "RFC1918" + date_allocated: "2024-06-01 12:00:00" + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. 
+ vrfs: [] + locations: [] + vlan: "" + tenant: "" + tags: [] + - network: "10.0.10.0/24" + namespace: "Global" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "Data Network" + rir: "RFC1918" + date_allocated: + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + vrfs: [] + locations: ["Atlanta", "Atlanta4"] + vlan: "vlan10__10__Atlanta VLANs" + tenant: "" + tags: [] + - network: "192.168.0.0/24" + namespace: "Customer1" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "Data Network" + rir: "RFC1918" + date_allocated: + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + vrfs: ["blue__Customer1"] + locations: ["Atlanta"] + vlan: "" + tenant: "" + tags: [] + - network: "192.168.0.0/24" + namespace: "Global" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "Data Network" + rir: "RFC1918" + date_allocated: + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + vrfs: ["red__Global"] + locations: ["Atlanta"] + vlan: "" + tenant: "" + tags: [] + - network: "192.168.1.0/24" + namespace: "" + prefix_type: "network" # network, container, or pool + status: "Active" + role: "" + rir: "" + date_allocated: + description: "" + # TODO: Need to consider how to allow loading from vrfs or prefix models. + vrfs: [] + locations: [] + vlan: "" + tenant: "" + tags: [] diff --git a/nautobot_ssot/integrations/bootstrap/fixtures/production.yml b/nautobot_ssot/integrations/bootstrap/fixtures/production.yml new file mode 100644 index 000000000..e5cf4dc6b --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/fixtures/production.yml @@ -0,0 +1,2 @@ +--- +git_branch: "production" diff --git a/nautobot_ssot/integrations/bootstrap/fixtures/staging.yml b/nautobot_ssot/integrations/bootstrap/fixtures/staging.yml new file mode 100644 index 000000000..c7915b439 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/fixtures/staging.yml @@ -0,0 +1,2 @@ +--- +git_branch: "staging" diff --git a/nautobot_ssot/integrations/bootstrap/jobs.py b/nautobot_ssot/integrations/bootstrap/jobs.py new file mode 100644 index 000000000..60d13e0f2 --- /dev/null +++ b/nautobot_ssot/integrations/bootstrap/jobs.py @@ -0,0 +1,160 @@ +"""Jobs for bootstrap SSoT integration.""" + +import os + +from nautobot.apps.jobs import BooleanVar, ChoiceVar + +from nautobot_ssot.integrations.bootstrap.diffsync.adapters import bootstrap, nautobot +from nautobot_ssot.jobs.base import DataMapping, DataSource, DataTarget + +name = "Bootstrap SSoT" # pylint: disable=invalid-name + + +class BootstrapDataSource(DataSource): + """Bootstrap SSoT Data Source.""" + + debug = BooleanVar(description="Enable for more verbose debug logging", default=False) + load_source = ChoiceVar( + choices=( + ("file", "File"), + ("git", "Git"), + ("env_var", "Environment Variable"), + ), + description="Where to load the yaml files from", + label="Load Source", + default="env_var", + ) + + class Meta: # pylint: disable=too-few-public-methods + """Meta data for bootstrap.""" + + name = "Bootstrap to Nautobot" + data_source = "Bootstrap" + data_target = "Nautobot" + description = "Sync information from Bootstrap to Nautobot" + + @classmethod + def config_information(cls): + """Dictionary describing the configuration of this DataSource.""" + return { + "Git loading source": os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_LOAD_SOURCE"), + "Git branch": 
os.getenv("NAUTOBOT_BOOTSTRAP_SSOT_ENVIRONMENT_BRANCH"), + } + + @classmethod + def data_mappings(cls): + """List describing the data mappings involved in this DataSource.""" + return ( + DataMapping("tenant_group", "", "TenantGroup", "tenancy:tenant-groups"), + DataMapping("tenant", "", "Tenant", "tenancy:tenant"), + DataMapping("role", "", "Roles", "extras.roles"), + DataMapping("manufacturer", "", "Manufacturer", "dcim.manufacturer"), + DataMapping("platform", "", "Platform", "dcim.platform"), + DataMapping("location_type", "", "LocationType", "dcim.location-type"), + DataMapping("location", "", "Location", "dcim.location"), + DataMapping("secrets", "", "Secrets", "extras:secrets"), + DataMapping("secrets_groups", "", "SecretsGroup", "extras:secrets-groups"), + DataMapping("git_repositories", "", "GitRepository", "extras:git-repositories"), + DataMapping("dynamic_groups", "", "DynamicGroup", "extras:dynamic-groups"), + DataMapping("computed_field", "", "ComputedField", "extras:computed-field"), + DataMapping("tags", "", "Tag", "extras.tag"), + DataMapping("graphql_query", "", "GraphQLQuery", "extras:graphql-query"), + DataMapping("tenant_group", "", "TenantGroup", "tenancy:tenant-troup"), + DataMapping("tenant", "", "Tenant", "tenancy:tenant"), + DataMapping("role", "", "Role", "extras:role"), + DataMapping("manufacturer", "", "Manufacturer", "dcim.manufacturer"), + DataMapping("platform", "", "Platform", "dcim.platform"), + DataMapping("location_type", "", "LocationType", "dcim.location_type"), + DataMapping("location", "", "Location", "dcim.location"), + DataMapping("team", "", "Team", "extras.team"), + DataMapping("contact", "", "Contact", "extras.contact"), + DataMapping("provider", "", "Provider", "circuits.provider"), + DataMapping("provider_network", "", "ProviderNetwork", "circuits.provider_network"), + DataMapping("circuit_type", "", "CircuitType", "circuits.circuit_type"), + DataMapping("circuit", "", "Circuit", "circuits.circuit"), + DataMapping( + "circuit_termination", + "", + "CircuitTermination", + "circuits.circuit_termination", + ), + DataMapping("namespace", "", "Namespace", "ipam.namespcae"), + DataMapping("rir", "", "RIR", "ipam.rir"), + DataMapping("vlan_group", "", "VLANGroup", "ipam.vlan_group"), + DataMapping("vlan", "", "VLAN", "ipam.vlan"), + DataMapping("vrf", "", "VRF", "ipam.vrf"), + DataMapping("prefix", "", "Prefix", "ipam.prefix"), + ) + + def load_source_adapter(self): + """Load data from Bootstrap into DiffSync models.""" + self.source_adapter = bootstrap.BootstrapAdapter(job=self, sync=self.sync) + self.source_adapter.load() + + def load_target_adapter(self): + """Load data from Nautobot into DiffSync models.""" + self.target_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync) + self.target_adapter.load() + + def run(self, load_source, dryrun, memory_profiling, debug, *args, **kwargs): # pylint: disable=arguments-differ + """Perform data synchronization.""" + self.debug = debug + self.dryrun = dryrun + self.memory_profiling = memory_profiling + self.load_source = load_source + super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs) + + +class BootstrapDataTarget(DataTarget): + """bootstrap SSoT Data Target.""" + + debug = BooleanVar(description="Enable for more verbose debug logging", default=False) + read_destination = ChoiceVar( + choices=( + ("file", "File"), + ("git", "Git"), + ("env_var", "Environment Variable"), + ), + description="Where to load the YAML files from", + label="Load Source", + 
+
+
+class BootstrapDataTarget(DataTarget):
+    """Bootstrap SSoT Data Target."""
+
+    debug = BooleanVar(description="Enable for more verbose debug logging", default=False)
+    read_destination = ChoiceVar(
+        choices=(
+            ("file", "File"),
+            ("git", "Git"),
+            ("env_var", "Environment Variable"),
+        ),
+        description="Where to load the YAML files from",
+        label="Load Source",
+        default="env_var",
+    )
+
+    class Meta:  # pylint: disable=too-few-public-methods
+        """Meta data for Bootstrap."""
+
+        name = "Nautobot to Bootstrap"
+        data_source = "Nautobot"
+        data_target = "Bootstrap"
+        description = "Sync information from Nautobot to Bootstrap"
+
+    @classmethod
+    def config_information(cls):
+        """Dictionary describing the configuration of this DataTarget."""
+        return {}
+
+    @classmethod
+    def data_mappings(cls):
+        """List describing the data mappings involved in this DataSource."""
+        return ()
+
+    def load_source_adapter(self):
+        """Load data from Nautobot into DiffSync models."""
+        self.source_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync)
+        self.source_adapter.load()
+
+    def load_target_adapter(self):
+        """Load data from Bootstrap into DiffSync models."""
+        self.target_adapter = bootstrap.BootstrapAdapter(job=self, sync=self.sync)
+        self.target_adapter.load()
+
+    def run(self, read_destination, dryrun, memory_profiling, debug, *args, **kwargs):  # pylint: disable=arguments-differ
+        """Perform data synchronization."""
+        self.debug = debug
+        self.dryrun = dryrun
+        self.memory_profiling = memory_profiling
+        self.read_destination = read_destination
+        super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs)
+
+
+jobs = [BootstrapDataSource, BootstrapDataTarget]
diff --git a/nautobot_ssot/integrations/bootstrap/signals.py b/nautobot_ssot/integrations/bootstrap/signals.py
new file mode 100644
index 000000000..07c6d144f
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/signals.py
@@ -0,0 +1,115 @@
+"""Signals triggered when Nautobot starts to perform certain actions."""
+
+import importlib.util
+
+from django.conf import settings
+from nautobot.core.signals import nautobot_database_ready
+from nautobot.extras.choices import CustomFieldTypeChoices
+
+from nautobot_ssot.utils import create_or_update_custom_field
+
+LIFECYCLE_MGMT = bool(importlib.util.find_spec("nautobot_device_lifecycle_mgmt"))
+
+
+def register_signals(sender):
+    """Register signals for the bootstrap integration."""
+    nautobot_database_ready.connect(nautobot_database_ready_callback, sender=sender)
+
+
+def nautobot_database_ready_callback(sender, *, apps, **kwargs):  # pylint: disable=unused-argument
+    """Add the System of Record and Last Synced from SoR custom fields to the models synced by this integration.
+
+    Callback function triggered by the nautobot_database_ready signal when the Nautobot database is fully ready.
+ """ + # pylint: disable=invalid-name, too-many-locals + ContentType = apps.get_model("contenttypes", "ContentType") + Manufacturer = apps.get_model("dcim", "Manufacturer") + Platform = apps.get_model("dcim", "Platform") + TenantGroup = apps.get_model("tenancy", "TenantGroup") + Tenant = apps.get_model("tenancy", "Tenant") + Team = apps.get_model("extras", "Team") + Contact = apps.get_model("extras", "Contact") + Location = apps.get_model("dcim", "Location") + LocationType = apps.get_model("dcim", "LocationType") + Namespace = apps.get_model("ipam", "Namespace") + RIR = apps.get_model("ipam", "RiR") + VLANGroup = apps.get_model("ipam", "VLANGroup") + VLAN = apps.get_model("ipam", "VLAN") + VRF = apps.get_model("ipam", "VRF") + Prefix = apps.get_model("ipam", "Prefix") + Provider = apps.get_model("circuits", "Provider") + ProviderNetwork = apps.get_model("circuits", "ProviderNetwork") + CircuitType = apps.get_model("circuits", "CircuitType") + Circuit = apps.get_model("circuits", "Circuit") + CircuitTermination = apps.get_model("circuits", "CircuitTermination") + Tag = apps.get_model("extras", "Tag") + Secret = apps.get_model("extras", "Secret") + SecretsGroup = apps.get_model("extras", "SecretsGroup") + DynamicGroup = apps.get_model("extras", "DynamicGroup") + GitRepository = apps.get_model("extras", "GitRepository") + Role = apps.get_model("extras", "Role") + + if LIFECYCLE_MGMT: + SoftwareLCM = apps.get_model("nautobot_device_lifecycle_mgmt", "SoftwareLCM") + SoftwareImageLCM = apps.get_model("nautobot_device_lifecycle_mgmt", "SoftwareImageLCM") + ValidatedSoftwareLCM = apps.get_model("nautobot_device_lifecycle_mgmt", "ValidatedSoftwareLCM") + + signal_to_model_mapping = { + "manufacturer": Manufacturer, + "platform": Platform, + "role": Role, + "tenant_group": TenantGroup, + "tenant": Tenant, + "team": Team, + "contact": Contact, + "location": Location, + "location_type": LocationType, + "namespace": Namespace, + "rir": RIR, + "vlan_group": VLANGroup, + "vlan": VLAN, + "vrf": VRF, + "prefix": Prefix, + "provider": Provider, + "provider_network": ProviderNetwork, + "circuit_type": CircuitType, + "circuit": Circuit, + "circuit_termination": CircuitTermination, + "tag": Tag, + "secret": Secret, + "secrets_group": SecretsGroup, + "dynamic_group": DynamicGroup, + "git_repository": GitRepository, + } + + if LIFECYCLE_MGMT: + signal_to_model_mapping.update( + { + "software": SoftwareLCM, + "software_image": SoftwareImageLCM, + "validated_software": ValidatedSoftwareLCM, + } + ) + + sync_custom_field, _ = create_or_update_custom_field( + key="last_synced_from_sor", + field_type=CustomFieldTypeChoices.TYPE_DATE, + label="Last sync from System of Record", + ) + sor_custom_field, _ = create_or_update_custom_field( + key="system_of_record", + field_type=CustomFieldTypeChoices.TYPE_TEXT, + label="System of Record", + ) + + models_to_sync = settings.PLUGINS_CONFIG.get("nautobot_ssot", {}).get("bootstrap_models_to_sync", {}) + no_cf = ["computed_field", "graph_ql_query"] + try: + for model in models_to_sync: + if model not in no_cf and models_to_sync[model] is True: + model_ct = ContentType.objects.get_for_model(signal_to_model_mapping[model]) + sor_custom_field.content_types.add(model_ct.id) + sync_custom_field.content_types.add(model_ct.id) + except Exception as e: + print(f"Error occurred: {e}") + raise diff --git a/nautobot_ssot/integrations/bootstrap/utils/__init__.py b/nautobot_ssot/integrations/bootstrap/utils/__init__.py new file mode 100644 index 000000000..0b65fdc96 --- /dev/null +++ 
b/nautobot_ssot/integrations/bootstrap/utils/__init__.py
@@ -0,0 +1,140 @@
+"""Utility functions for working with bootstrap and Nautobot."""
+
+import inspect
+import os
+
+from django.contrib.contenttypes.models import ContentType
+from django.core.exceptions import ValidationError
+from django.core.validators import URLValidator
+from django.db import models
+from nautobot.extras.datasources.registry import get_datasource_content_choices
+from nautobot.extras.models import Contact, Team
+from nautobot.extras.utils import FeatureQuery, RoleModelsQuery, TaggableClassesQuery
+
+
+def is_running_tests():
+    """Check whether running unittests or actual job."""
+    for frame in inspect.stack():
+        if frame.filename.endswith("unittest/case.py"):
+            return True
+    return False
+
+
+def check_sor_field(model):
+    """Check if the System of Record field is present and is set to "Bootstrap"."""
+    return (
+        "system_of_record" in model.custom_field_data
+        and model.custom_field_data["system_of_record"] is not None
+        and os.getenv("SYSTEM_OF_RECORD", "Bootstrap") in model.custom_field_data["system_of_record"]
+    )
+
+
+def get_sor_field_nautobot_object(nb_object):
+    """Get the System of Record field from an object."""
+    _sor = ""
+    if "system_of_record" in nb_object.custom_field_data:
+        _sor = (
+            nb_object.custom_field_data["system_of_record"]
+            if nb_object.custom_field_data["system_of_record"] is not None
+            else ""
+        )
+    return _sor
+
+
+def lookup_content_type(content_model_path, content_type):
+    """Lookup content type for a GitRepository object."""
+    _choices = get_datasource_content_choices(content_model_path)
+    _found_type = None
+    for _element in _choices:
+        if _element[1] == content_type:
+            _found_type = _element[0]
+            return _found_type
+    return None
+
+
+def lookup_content_type_id(nb_model, model_path):
+    """Find ContentType choices for a model path and return the ContentType ID."""
+    _choices = FeatureQuery(nb_model).get_choices()
+    _found_type = None
+    for _element in _choices:
+        if _element[0] == model_path:
+            _found_type = _element[1]
+            return _found_type
+    return None
+
+
+def lookup_content_type_model_path(nb_model, content_id):
+    """Find ContentType choices for a model and return the model path for a given ContentType ID."""
+    _choices = FeatureQuery(nb_model).get_choices()
+    _found_type = None
+    for _element in _choices:
+        if _element[1] == content_id:
+            _found_type = _element[0]
+            return _found_type
+    return None
+
+
+def lookup_tag_content_type_model_path(content_id):
+    """Find model paths for a given ContentType ID for Tag Objects."""
+    _content_type = ContentType.objects.get(id=content_id)
+    return f"{_content_type.model}.{_content_type.name.replace(' ', '')}"
+
+
+def lookup_model_for_taggable_class_id(content_id):
+    """Find a model path for a given ContentType ID."""
+    _choices = TaggableClassesQuery().get_choices()
+    _found_type = None
+    for _element in _choices:
+        if _element[1] == content_id:
+            _found_type = _element[0]
+            return _found_type
+    return None
+
+
+def lookup_content_type_for_taggable_model_path(content_model_path):
+    """Lookup the ContentType for a taggable model path."""
+    _app_label = content_model_path.split(".", 1)[0]
+    _model = content_model_path.split(".", 1)[1]
+
+    return ContentType.objects.get(model=_model, app_label=_app_label)
+
+
+def string_to_urlfield(url):
+    """Turn string url into a URLField object."""
+    url_validator = URLValidator()
+
+    try:
+        url_validator(url)
+    except ValidationError:
+        return models.URLField(default="https://example.com", blank=True)
+
+    return 
models.URLField(default=url, blank=True, null=True)
+
+
+def lookup_model_for_role_id(content_id):
+    """Find a model path for a given ContentType ID."""
+    _choices = RoleModelsQuery().get_choices()
+    _found_type = None
+    for _element in _choices:
+        if _element[1] == content_id:
+            _found_type = _element[0]
+            return _found_type
+    return None
+
+
+def lookup_team_for_contact(team):
+    """Find a Nautobot Team object by name and return the object."""
+    try:
+        _team = Team.objects.get(name=team)
+        return _team
+    except Team.DoesNotExist:
+        return None
+
+
+def lookup_contact_for_team(contact):
+    """Find a Nautobot Contact object by name and return the object."""
+    try:
+        _contact = Contact.objects.get(name=contact)
+        return _contact
+    except Contact.DoesNotExist:
+        return None
diff --git a/nautobot_ssot/integrations/bootstrap/utils/bootstrap.py b/nautobot_ssot/integrations/bootstrap/utils/bootstrap.py
new file mode 100644
index 000000000..b0c76a225
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/utils/bootstrap.py
@@ -0,0 +1 @@
+"""Utility functions for working with bootstrap."""
diff --git a/nautobot_ssot/integrations/bootstrap/utils/nautobot.py b/nautobot_ssot/integrations/bootstrap/utils/nautobot.py
new file mode 100644
index 000000000..2dfe44757
--- /dev/null
+++ b/nautobot_ssot/integrations/bootstrap/utils/nautobot.py
@@ -0,0 +1,29 @@
+"""Utility functions for working with Nautobot."""
+
+from nautobot.ipam.models import PrefixLocationAssignment, VRFPrefixAssignment
+
+
+def get_vrf_prefix_assignments(prefix):
+    """Retrieve all VRF assignments for a Prefix and return a list of VRF Names."""
+    _assignments = []
+    _vrf_assignments = VRFPrefixAssignment.objects.filter(prefix_id=prefix.id)
+
+    if _vrf_assignments:
+        for _vrf in _vrf_assignments:
+            _assignments.append(f"{_vrf.vrf.name}__{prefix.namespace.name}")
+        return _assignments
+
+    return None
+
+
+def get_prefix_location_assignments(prefix):
+    """Retrieve all Location assignments for a Prefix and return a list of Location Names."""
+    _locations = []
+    _location_assignments = PrefixLocationAssignment.objects.filter(prefix_id=prefix.id)
+
+    if _location_assignments:
+        for _location in _location_assignments:
+            _locations.append(_location.location.name)
+        return _locations
+
+    return None
diff --git a/nautobot_ssot/integrations/device42/diffsync/adapters/device42.py b/nautobot_ssot/integrations/device42/diffsync/adapters/device42.py
index 8b617b3d9..25e7dbdd7 100644
--- a/nautobot_ssot/integrations/device42/diffsync/adapters/device42.py
+++ b/nautobot_ssot/integrations/device42/diffsync/adapters/device42.py
@@ -201,6 +201,7 @@ def load_buildings(self):
             _tags.sort()
             building = self.building(
                 name=record["name"],
+                location_type=self.job.building_loctype.name,
                 address=sanitize_string(record["address"]) if record.get("address") else "",
                 latitude=float(round(Decimal(record["latitude"] if record["latitude"] else 0.0), 6)),
                 longitude=float(round(Decimal(record["longitude"] if record["longitude"] else 0.0), 6)),
@@ -235,6 +236,7 @@ def load_rooms(self):
             room = self.room(
                 name=record["name"],
                 building=record["building"],
+                building_loctype=self.job.building_loctype.name,
                 notes=record["notes"] if record.get("notes") else "",
                 custom_fields=get_custom_field_dict(record["custom_fields"]),
                 tags=_tags,
             )
             try:
                 self.add(room)
-                _site = self.get(self.building, record.get("building"))
+                _site = self.get(
+                    self.building, {"name": record.get("building"), "location_type": self.job.building_loctype.name}
+                )
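+                # Attach the Room as a child of its parent Building in the DiffSync tree.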
                 _site.add_child(child=room)
             except ObjectAlreadyExists as err:
                 if self.job.debug:
@@ -275,7 +279,13 @@ def load_racks(self):
             try:
                 self.add(rack)
                 _room = self.get(
-                    self.room, {"name": record["room"], "building": record["building"], "room": record["room"]}
+                    self.room,
+                    {
+                        "name": record["room"],
+                        "building": record["building"],
+                        "building_loctype": self.job.building_loctype.name,
+                        "room": record["room"],
+                    },
                 )
                 _room.add_child(child=rack)
             except ObjectAlreadyExists as err:
diff --git a/nautobot_ssot/integrations/device42/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/device42/diffsync/adapters/nautobot.py
index d4495787a..184e51575 100644
--- a/nautobot_ssot/integrations/device42/diffsync/adapters/nautobot.py
+++ b/nautobot_ssot/integrations/device42/diffsync/adapters/nautobot.py
@@ -14,7 +14,6 @@
     FrontPort,
     Interface,
     Location,
-    LocationType,
     Manufacturer,
     Platform,
     Rack,
@@ -161,11 +160,12 @@ def sync_complete(self, source: Adapter, *args, **kwargs):
 
     def load_sites(self):
         """Add Nautobot Site objects as DiffSync Building models."""
-        for site in Location.objects.filter(location_type=LocationType.objects.get_or_create(name="Site")[0]):
+        for site in Location.objects.filter(location_type=self.job.building_loctype):
             self.site_map[site.name] = site.id
             try:
                 building = self.building(
                     name=site.name,
+                    location_type=self.job.building_loctype.name,
                     address=site.physical_address,
                     latitude=site.latitude,
                     longitude=site.longitude,
@@ -191,12 +191,15 @@ def load_rackgroups(self):
             room = self.room(
                 name=_rg.name,
                 building=_rg.location.name,
+                building_loctype=self.job.building_loctype.name,
                 notes=_rg.description,
                 custom_fields=nautobot.get_custom_field_dict(_rg.get_custom_fields()),
                 uuid=_rg.id,
             )
             self.add(room)
-            _site = self.get(self.building, _rg.location.name)
+            _site = self.get(
+                self.building, {"name": _rg.location.name, "location_type": self.job.building_loctype.name}
+            )
             _site.add_child(child=room)
 
     def load_racks(self):
diff --git a/nautobot_ssot/integrations/device42/diffsync/models/__init__.py b/nautobot_ssot/integrations/device42/diffsync/models/__init__.py
index 132b83161..f09843ad5 100644
--- a/nautobot_ssot/integrations/device42/diffsync/models/__init__.py
+++ b/nautobot_ssot/integrations/device42/diffsync/models/__init__.py
@@ -35,6 +35,12 @@
     NautobotRoom,
     NautobotVendor,
 )
+from nautobot_ssot.integrations.device42.diffsync.models.nautobot.ipam import (
+    NautobotIPAddress,
+    NautobotSubnet,
+    NautobotVLAN,
+    NautobotVRFGroup,
+)
 
 __all__ = (
     "PatchPanel",
@@ -65,8 +71,12 @@
     "NautobotConnection",
     "NautobotDevice",
     "NautobotHardware",
+    "NautobotIPAddress",
     "NautobotPort",
     "NautobotRack",
     "NautobotRoom",
+    "NautobotSubnet",
     "NautobotVendor",
+    "NautobotVLAN",
+    "NautobotVRFGroup",
 )
diff --git a/nautobot_ssot/integrations/device42/diffsync/models/base/dcim.py b/nautobot_ssot/integrations/device42/diffsync/models/base/dcim.py
index 9515bc10b..3867daec2 100644
--- a/nautobot_ssot/integrations/device42/diffsync/models/base/dcim.py
+++ b/nautobot_ssot/integrations/device42/diffsync/models/base/dcim.py
@@ -10,10 +10,11 @@ class Building(DiffSyncModel):
     """Base Building model."""
 
     _modelname = "building"
-    _identifiers = ("name",)
+    _identifiers = ("name", "location_type")
     _attributes = ("address", "latitude", "longitude", "contact_name", "contact_phone", "tags", "custom_fields")
     _children = {"room": "rooms"}
     name: str
+    location_type: str
     address: Optional[str] = None
     latitude: Optional[float] = None
     longitude: Optional[float] = None
@@ -29,11 +30,12 @@ class 
Room(DiffSyncModel): """Base Room model.""" _modelname = "room" - _identifiers = ("name", "building") + _identifiers = ("name", "building", "building_loctype") _attributes = ("notes", "custom_fields") _children = {"rack": "racks"} name: str building: str + building_loctype: str notes: Optional[str] = None racks: List["Rack"] = [] custom_fields: Optional[dict] = None diff --git a/nautobot_ssot/integrations/device42/diffsync/models/nautobot/dcim.py b/nautobot_ssot/integrations/device42/diffsync/models/nautobot/dcim.py index 2ac93bb94..90d2d062a 100644 --- a/nautobot_ssot/integrations/device42/diffsync/models/nautobot/dcim.py +++ b/nautobot_ssot/integrations/device42/diffsync/models/nautobot/dcim.py @@ -14,7 +14,6 @@ from nautobot.dcim.models import FrontPort as OrmFrontPort from nautobot.dcim.models import Interface as OrmInterface from nautobot.dcim.models import Location as OrmSite -from nautobot.dcim.models import LocationType as OrmLocationType from nautobot.dcim.models import Manufacturer as OrmManufacturer from nautobot.dcim.models import Rack as OrmRack from nautobot.dcim.models import RackGroup as OrmRackGroup @@ -52,16 +51,15 @@ class NautobotBuilding(Building): @classmethod def create(cls, adapter, ids, attrs): """Create Site object in Nautobot.""" - adapter.job.logger.info(f"Creating Site {ids['name']}.") + adapter.job.logger.info(f"Creating {ids['location_type']} {ids['name']}.") def_site_status = adapter.status_map[DEFAULTS.get("site_status")] - loc_type = OrmLocationType.objects.get_or_create(name="Site")[0] new_site = OrmSite( name=ids["name"], status_id=def_site_status, physical_address=attrs["address"] if attrs.get("address") else "", latitude=round(Decimal(attrs["latitude"] if attrs["latitude"] else 0.0), 6), longitude=round(Decimal(attrs["longitude"] if attrs["longitude"] else 0.0), 6), - location_type=loc_type, + location_type=adapter.job.building_loctype, contact_name=attrs["contact_name"] if attrs.get("contact_name") else "", contact_phone=attrs["contact_phone"] if attrs.get("contact_phone") else "", ) diff --git a/nautobot_ssot/integrations/device42/jobs.py b/nautobot_ssot/integrations/device42/jobs.py index 3cf996049..5dbe369bc 100644 --- a/nautobot_ssot/integrations/device42/jobs.py +++ b/nautobot_ssot/integrations/device42/jobs.py @@ -3,12 +3,14 @@ from django.templatetags.static import static from django.urls import reverse +from nautobot.dcim.models import LocationType from nautobot.extras.jobs import BooleanVar, ObjectVar from nautobot.extras.models import ExternalIntegration from nautobot_ssot.integrations.device42.diffsync.adapters.device42 import Device42Adapter from nautobot_ssot.integrations.device42.diffsync.adapters.nautobot import NautobotAdapter from nautobot_ssot.integrations.device42.utils.device42 import Device42API +from nautobot_ssot.integrations.device42.utils.nautobot import ensure_contenttypes_on_location_type from nautobot_ssot.jobs.base import DataMapping, DataSource from nautobot_ssot.utils import get_username_password_https_from_secretsgroup @@ -18,6 +20,11 @@ class Device42DataSource(DataSource): # pylint: disable=too-many-instance-attributes """Device42 SSoT Data Source.""" + debug = BooleanVar(description="Enable for more verbose debug logging", default=False) + bulk_import = BooleanVar( + description="Perform bulk operations when importing data. CAUTION! 
Might cause bad data to be pushed to Nautobot.", + default=False, + ) integration = ObjectVar( model=ExternalIntegration, queryset=ExternalIntegration.objects.all(), @@ -25,8 +32,14 @@ class Device42DataSource(DataSource): # pylint: disable=too-many-instance-attri required=True, label="Device42 Instance", ) - debug = BooleanVar(description="Enable for more verbose debug logging", default=False) - bulk_import = BooleanVar(description="Enable using bulk create option for object creation.", default=False) + building_loctype = ObjectVar( + model=LocationType, + queryset=LocationType.objects.all(), + display_field="name", + required=False, + label="Building LocationType", + description="LocationType to use for imported Buildings from Device42. If unspecified, will revert to Site LocationType.", + ) class Meta: """Meta data for Device42.""" @@ -46,7 +59,7 @@ def data_mappings(cls): """List describing the data mappings involved in this DataSource.""" return ( DataMapping( - "Buildings", "/admin/rackraj/building/", "Sites", reverse("dcim:site_list") + "Buildings", "/admin/rackraj/building/", "Locations", reverse("dcim:location_list") ), DataMapping( "Rooms", @@ -134,11 +147,15 @@ def load_target_adapter(self): self.target_adapter.load() def run( # pylint: disable=arguments-differ, too-many-arguments - self, dryrun, memory_profiling, integration, debug, bulk_import, *args, **kwargs + self, dryrun, memory_profiling, integration, debug, bulk_import, building_loctype, *args, **kwargs ): """Perform data synchronization.""" self.integration = integration self.bulk_import = bulk_import + self.building_loctype = building_loctype + if not self.building_loctype: + self.building_loctype = LocationType.objects.get_or_create(name="Site")[0] + ensure_contenttypes_on_location_type(location_type=self.building_loctype) self.debug = debug self.dryrun = dryrun self.memory_profiling = memory_profiling diff --git a/nautobot_ssot/integrations/device42/signals.py b/nautobot_ssot/integrations/device42/signals.py deleted file mode 100644 index 5692db849..000000000 --- a/nautobot_ssot/integrations/device42/signals.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Signals for Device42 integration.""" - -from nautobot.core.signals import nautobot_database_ready - - -def register_signals(sender): - """Register signals for IPFabric integration.""" - nautobot_database_ready.connect(nautobot_database_ready_callback, sender=sender) - - -# pylint: disable=unused-argument, invalid-name -def nautobot_database_ready_callback(sender, *, apps, **kwargs): - """Ensure Site LocationType created and configured correctly. - - Callback function triggered by the nautobot_database_ready signal when the Nautobot database is fully ready. 
- """ - ContentType = apps.get_model("contenttypes", "ContentType") - Device = apps.get_model("dcim", "Device") - Site = apps.get_model("dcim", "Location") - RackGroup = apps.get_model("dcim", "RackGroup") - Rack = apps.get_model("dcim", "Rack") - Prefix = apps.get_model("ipam", "Prefix") - VLAN = apps.get_model("ipam", "VLAN") - LocationType = apps.get_model("dcim", "LocationType") - VirtualChassis = apps.get_model("dcim", "VirtualChassis") - - loc_type = LocationType.objects.update_or_create(name="Site")[0] - for obj_type in [Site, RackGroup, Rack, Device, VirtualChassis, Prefix, VLAN]: - loc_type.content_types.add(ContentType.objects.get_for_model(obj_type)) - loc_type.save() diff --git a/nautobot_ssot/integrations/device42/utils/device42.py b/nautobot_ssot/integrations/device42/utils/device42.py index ec2917fb2..4eeb4289f 100644 --- a/nautobot_ssot/integrations/device42/utils/device42.py +++ b/nautobot_ssot/integrations/device42/utils/device42.py @@ -13,20 +13,6 @@ from nautobot_ssot.integrations.device42.diffsync.models.base.ipam import VLAN -class MissingConfigSetting(Exception): - """Exception raised for missing configuration settings. - - Attributes: - message (str): Returned explanation of Error. - """ - - def __init__(self, setting): - """Initialize Exception with Setting that is missing and message.""" - self.setting = setting - self.message = f"Missing configuration setting - {setting}!" - super().__init__(self.message) - - def merge_offset_dicts(orig_dict: dict, offset_dict: dict) -> dict: """Method to merge two dicts and merge a list if found. diff --git a/nautobot_ssot/integrations/device42/utils/nautobot.py b/nautobot_ssot/integrations/device42/utils/nautobot.py index 36e023c73..f0b437f72 100644 --- a/nautobot_ssot/integrations/device42/utils/nautobot.py +++ b/nautobot_ssot/integrations/device42/utils/nautobot.py @@ -9,9 +9,10 @@ from diffsync.exceptions import ObjectNotFound from django.contrib.contenttypes.models import ContentType from nautobot.circuits.models import CircuitType -from nautobot.dcim.models import Device, Interface, Platform +from nautobot.dcim.models import Device, Interface, Location, LocationType, Platform, Rack, RackGroup, VirtualChassis from nautobot.extras.choices import CustomFieldTypeChoices from nautobot.extras.models import CustomField, Relationship, Role, Tag +from nautobot.ipam.models import VLAN, Prefix from netutils.lib_mapper import ANSIBLE_LIB_MAPPER_REVERSE, NAPALM_LIB_MAPPER_REVERSE from taggit.managers import TaggableManager @@ -362,3 +363,13 @@ def apply_vlans_to_port(adapter, device_name: str, mode: str, vlans: list, port: tagged_vlans.append(tagged_vlan) port.tagged_vlans.set(tagged_vlans) port.validated_save() + + +def ensure_contenttypes_on_location_type(location_type: LocationType): + """Ensure that the required ContentTypes are on the specified Building LocationType. + + Args: + location_type (LocationType): The specified LocationType to use when importing Building Locations. 
+ """ + for obj_type in [Location, RackGroup, Rack, Device, VirtualChassis, Prefix, VLAN]: + location_type.content_types.add(ContentType.objects.get_for_model(obj_type)) diff --git a/nautobot_ssot/integrations/dna_center/diffsync/adapters/dna_center.py b/nautobot_ssot/integrations/dna_center/diffsync/adapters/dna_center.py index abccda16a..b101af86c 100644 --- a/nautobot_ssot/integrations/dna_center/diffsync/adapters/dna_center.py +++ b/nautobot_ssot/integrations/dna_center/diffsync/adapters/dna_center.py @@ -72,13 +72,44 @@ def load_locations(self): def load_controller_locations(self): """Load location data for Controller specified in Job form.""" - if self.job.dnac.location.location_type.name == "Site": + if self.job.dnac.location.location_type == self.job.floor_loctype: + self.get_or_instantiate( + self.floor, + ids={ + "name": self.job.dnac.location.name, + "building": self.job.dnac.location.parent.name, + }, + attrs={ + "tenant": self.job.dnac.location.tenant.name if self.job.dnac.location.tenant else None, + "uuid": None, + }, + ) + if ( + self.job.dnac.location.parent.parent + and self.job.dnac.location.parent.parent.location_type == self.job.building_loctype + ): self.get_or_instantiate( self.building, - ids={"name": self.job.dnac.location.name}, + ids={ + "name": self.job.dnac.location.parent.parent.name, + "parent": ( + self.job.dnac.location.parent.parent.parent.name + if self.job.dnac.location.parent.parent.parent + else None + ), + }, + attrs={"uuid": None}, + ) + + if self.job.dnac.location.location_type == self.job.building_loctype: + self.get_or_instantiate( + self.building, + ids={ + "name": self.job.dnac.location.name, + "area": self.job.dnac.location.parent.name if self.job.dnac.location.parent else None, + }, attrs={ "address": self.job.dnac.location.physical_address, - "area": self.job.dnac.location.parent.name if self.job.dnac.location.parent else None, "area_parent": ( self.job.dnac.location.parent.parent.name if self.job.dnac.location.parent and self.job.dnac.location.parent.parent @@ -90,7 +121,7 @@ def load_controller_locations(self): "uuid": None, }, ) - if self.job.dnac.location.parent.location_type.name == "Region": + if self.job.dnac.location.parent.location_type == self.job.area_loctype: self.get_or_instantiate( self.area, ids={ @@ -101,7 +132,10 @@ def load_controller_locations(self): }, attrs={"uuid": None}, ) - if self.job.dnac.location.parent.parent and self.job.dnac.location.parent.parent.location_type.name == "Region": + if ( + self.job.dnac.location.parent.parent + and self.job.dnac.location.parent.parent.location_type == self.job.area_loctype + ): self.get_or_instantiate( self.area, ids={ @@ -138,9 +172,11 @@ def load_areas(self, areas: List[dict]): ) if loaded: if self.job.debug: - self.job.logger.info(f"Loaded area {location['name']}. {location}") + self.job.logger.info(f"Loaded {self.job.area_loctype.name} {location['name']}. {location}") else: - self.job.logger.warning(f"Duplicate area {location['name']} attempting to be loaded.") + self.job.logger.warning( + f"Duplicate {self.job.area_loctype.name} {location['name']} attempting to be loaded." + ) def load_buildings(self, buildings: List[dict]): """Load building data from DNAC into DiffSync model. @@ -149,19 +185,21 @@ def load_buildings(self, buildings: List[dict]): buildings (List[dict]): List of dictionaries containing location information about a building. 
""" for location in buildings: + if location["parentId"] in self.dnac_location_map: + _area = self.dnac_location_map[location["parentId"]] + else: + _area = {"name": "Global", "parent": None} try: - self.get(self.building, location["name"]) - self.job.logger.warning(f"Building {location['name']} already loaded so skipping.") + self.get(self.building, {"name": location["name"], "area": _area["name"]}) + self.job.logger.warning( + f"{self.job.building_loctype.name} {location['name']} already loaded so skipping." + ) continue except ObjectNotFound: if self.job.debug: - self.job.logger.info(f"Loading building {location['name']}. {location}") + self.job.logger.info(f"Loading {self.job.building_loctype.name} {location['name']}. {location}") address, _ = self.conn.find_address_and_type(info=location["additionalInfo"]) latitude, longitude = self.conn.find_latitude_and_longitude(info=location["additionalInfo"]) - if location["parentId"] in self.dnac_location_map: - _area = self.dnac_location_map[location["parentId"]] - else: - _area = {"name": "Global", "parent": None} new_building = self.building( name=location["name"], address=address if address else "", @@ -193,7 +231,10 @@ def load_floors(self, floors: List[dict]): continue floor_name = f"{_building['name']} - {location['name']}" try: - self.get(self.floor, {"name": floor_name, "building": _building["name"]}) + self.get( + self.floor, + {"name": floor_name, "building": _building["name"]}, + ) self.job.logger.warning(f"Duplicate Floor {floor_name} attempting to be loaded.") except ObjectNotFound: new_floor = self.floor( @@ -205,11 +246,14 @@ def load_floors(self, floors: List[dict]): try: self.add(new_floor) try: - parent = self.get(self.building, _building["name"]) + parent = self.get( + self.building, + {"name": _building["name"], "area": self.dnac_location_map[location["parentId"]]["parent"]}, + ) parent.add_child(new_floor) except ObjectNotFound as err: self.job.logger.warning( - f"Unable to find building {_building['name']} for floor {floor_name}. {err}" + f"Unable to find {self.job.building_loctype.name} {_building['name']} for {self.job.floor_loctype.name} {floor_name}. {err}" ) except ValidationError as err: self.job.logger.warning(f"Unable to load floor {floor_name}. {err}") diff --git a/nautobot_ssot/integrations/dna_center/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/dna_center/diffsync/adapters/nautobot.py index 973b8ec22..542c0d359 100644 --- a/nautobot_ssot/integrations/dna_center/diffsync/adapters/nautobot.py +++ b/nautobot_ssot/integrations/dna_center/diffsync/adapters/nautobot.py @@ -40,7 +40,6 @@ NautobotPort, NautobotPrefix, ) -from nautobot_ssot.jobs.base import DataTarget class NautobotAdapter(Adapter): @@ -69,13 +68,11 @@ class NautobotAdapter(Adapter): prefix_map = {} ipaddr_map = {} - def __init__( - self, *args, job: Optional[DataTarget] = None, sync=None, tenant: Optional[OrmTenant] = None, **kwargs - ): + def __init__(self, *args, job, sync=None, tenant: Optional[OrmTenant] = None, **kwargs): """Initialize Nautobot. Args: - job (DataTarget, optional): Nautobot job. Defaults to None. + job (DataSource): Nautobot job. sync (object, optional): Nautobot DiffSync. Defaults to None. tenant (OrmTenant, optional): Tenant defined in Job form that all non-location objects should belong to. 
""" @@ -86,85 +83,82 @@ def __init__( self.objects_to_create = defaultdict(list) self.objects_to_delete = defaultdict(list) - def load_regions(self): - """Load Region data from Nautobt into DiffSync models.""" - try: - locations = OrmLocation.objects.filter(location_type=self.locationtype_map["Region"]).select_related( - "parent" - ) - for region in locations: - parent = None - if region.parent: - parent = region.parent.name - if parent not in self.region_map: - self.region_map[parent] = {} - self.region_map[parent][region.name] = region.id - try: - self.get(self.area, {"name": region.name, "parent": parent}) - self.job.logger.warning(f"Region {region.name} already loaded so skipping duplicate.") - except ObjectNotFound: - new_region = self.area( - name=region.name, - parent=parent, - uuid=region.id, - ) - if not PLUGIN_CFG.get("dna_center_delete_locations"): - new_region.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST - self.add(new_region) - except OrmLocationType.DoesNotExist as err: - self.job.logger.warning( - f"Unable to find LocationType: Region so can't find region Locations to load. {err}" - ) + def load_areas(self): + """Load Location data from Nautobot for specified Area LocationType into DiffSync models.""" + areas = OrmLocation.objects.filter(location_type=self.job.area_loctype).select_related("parent") + for area in areas: + parent = None + if area.parent: + parent = area.parent.name + if parent not in self.region_map: + self.region_map[parent] = {} + self.region_map[parent][area.name] = area.id + try: + self.get(self.area, {"name": area.name, "parent": parent}) + self.job.logger.warning( + f"{self.job.area_loctype.name} {area.name} already loaded so skipping duplicate." + ) + except ObjectNotFound: + new_region = self.area( + name=area.name, + parent=parent, + uuid=area.id, + ) + if not PLUGIN_CFG.get("dna_center_delete_locations"): + new_region.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_region) - def load_sites(self): - """Load Site data from Nautobot into DiffSync models.""" - try: - locations = OrmLocation.objects.filter(location_type=self.locationtype_map["Site"]) - for site in locations: - self.site_map[site.name] = site.id - try: - self.get(self.building, {"name": site.name, "area": site.parent.name if site.parent else None}) - except ObjectNotFound: - new_building = self.building( - name=site.name, - address=site.physical_address, - area=site.parent.name if site.parent else "", - area_parent=site.parent.parent.name if site.parent and site.parent.parent else None, - latitude=str(site.latitude).rstrip("0"), - longitude=str(site.longitude).rstrip("0"), - tenant=site.tenant.name if site.tenant else None, - uuid=site.id, - ) - if not PLUGIN_CFG.get("dna_center_delete_locations"): - new_building.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST - self.add(new_building) - except OrmLocationType.DoesNotExist as err: - self.job.logger.warning(f"Unable to find LocationType: Site so can't find site Locations to load. 
{err}") + def load_buildings(self): + """Load Location data from Nautobot for specified Building LocationType into DiffSync models.""" + buildings = OrmLocation.objects.filter(location_type=self.job.building_loctype) + for building in buildings: + self.site_map[building.name] = building.id + try: + self.get( + self.building, + { + "name": building.name, + "area": building.parent.name if building.parent else None, + }, + ) + except ObjectNotFound: + new_building = self.building( + name=building.name, + address=building.physical_address, + area=building.parent.name if building.parent else "", + area_parent=building.parent.parent.name if building.parent and building.parent.parent else None, + latitude=str(building.latitude).rstrip("0"), + longitude=str(building.longitude).rstrip("0"), + tenant=building.tenant.name if building.tenant else None, + uuid=building.id, + ) + if not PLUGIN_CFG.get("dna_center_delete_locations"): + new_building.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST + self.add(new_building) def load_floors(self): """Load LocationType floors from Nautobot into DiffSync models.""" - try: - loc_type = OrmLocationType.objects.get(name="Floor") - locations = OrmLocation.objects.filter(location_type=loc_type) - for location in locations: - self.floor_map[location.name] = location.id - new_floor = self.floor( - name=location.name, - building=location.parent.name if location.parent else "", - tenant=location.tenant.name if location.tenant else None, - uuid=location.id, - ) - self.add(new_floor) - try: - if location.parent: - building = self.get(self.building, location.parent.name) - building.add_child(new_floor) - except ObjectNotFound as err: - self.job.logger.warning( - f"Unable to load building {location.parent.name} for floor {location.name}. {err}" + floors = OrmLocation.objects.filter(location_type=self.job.floor_loctype) + for floor in floors: + self.floor_map[floor.name] = floor.id + new_floor = self.floor( + name=floor.name, + building=floor.parent.name if floor.parent else "", + tenant=floor.tenant.name if floor.tenant else None, + uuid=floor.id, + ) + self.add(new_floor) + try: + if floor.parent: + building = self.get( + self.building, + {"name": floor.parent.name, "area": floor.parent.parent.name}, ) - except OrmLocationType.DoesNotExist as err: - self.job.logger.warning(f"Unable to find LocationType: Floor so can't find floor Locations to load. {err}") + building.add_child(new_floor) + except ObjectNotFound as err: + self.job.logger.warning( + f"Unable to load {self.job.building_loctype.name} {floor.parent.name} for {self.job.floor_loctype.name} {floor.name}. 
{err}" + ) def load_devices(self): """Load Device data from Nautobot into DiffSync models.""" @@ -392,8 +386,8 @@ def load(self): self.tenant_map = {tenant.name: tenant.id for tenant in OrmTenant.objects.only("id", "name")} self.namespace_map = {ns.name: ns.id for ns in Namespace.objects.only("id", "name")} - self.load_regions() - self.load_sites() + self.load_areas() + self.load_buildings() self.load_floors() self.load_devices() self.load_ports() diff --git a/nautobot_ssot/integrations/dna_center/diffsync/models/base.py b/nautobot_ssot/integrations/dna_center/diffsync/models/base.py index d2f3e70a2..9da609aa7 100644 --- a/nautobot_ssot/integrations/dna_center/diffsync/models/base.py +++ b/nautobot_ssot/integrations/dna_center/diffsync/models/base.py @@ -24,8 +24,8 @@ class Building(DiffSyncModel): """DiffSync model for DNA Center buildings.""" _modelname = "building" - _identifiers = ("name",) - _attributes = ("address", "area", "area_parent", "latitude", "longitude", "tenant") + _identifiers = ("name", "area") + _attributes = ("address", "area_parent", "latitude", "longitude", "tenant") _children = {"floor": "floors"} name: str diff --git a/nautobot_ssot/integrations/dna_center/diffsync/models/nautobot.py b/nautobot_ssot/integrations/dna_center/diffsync/models/nautobot.py index 9ff2d6f18..e712b538e 100644 --- a/nautobot_ssot/integrations/dna_center/diffsync/models/nautobot.py +++ b/nautobot_ssot/integrations/dna_center/diffsync/models/nautobot.py @@ -10,7 +10,6 @@ DeviceType, Interface, Location, - LocationType, Manufacturer, ) from nautobot.extras.models import Role @@ -38,23 +37,25 @@ class NautobotArea(base.Area): def create(cls, adapter, ids, attrs): """Create Region in Nautobot from Area object.""" if adapter.job.debug: - adapter.job.logger.info(f"Creating Region {ids['name']} in {ids['parent']}.") - new_region = Location( + adapter.job.logger.info(f"Creating {adapter.job.area_loctype.name} {ids['name']} in {ids['parent']}.") + new_area = Location( name=ids["name"], - location_type_id=adapter.locationtype_map["Region"], + location_type=adapter.job.area_loctype, status_id=adapter.status_map["Active"], ) try: parents_parent = "Global" if ids["parent"] == "Global": parents_parent = None - new_region.parent_id = adapter.region_map[parents_parent][ids["parent"]] + new_area.parent_id = adapter.region_map[parents_parent][ids["parent"]] except KeyError: - adapter.job.logger.warning(f"Unable to find Region {ids['parent']} for {ids['name']}.") - new_region.validated_save() + adapter.job.logger.warning( + f"Unable to find {adapter.job.area_loctype.name} {ids['parent']} for {ids['name']}." 
+            )
+        new_area.validated_save()
         if ids["parent"] not in adapter.region_map:
             adapter.region_map[ids["parent"]] = {}
-        adapter.region_map[ids["parent"]][ids["name"]] = new_region.id
+        adapter.region_map[ids["parent"]][ids["name"]] = new_area.id
         return super().create(adapter=adapter, ids=ids, attrs=attrs)
 
     def delete(self):
@@ -66,7 +67,7 @@ def delete(self):
             return None
         area = Location.objects.get(id=self.uuid)
         if self.adapter.job.debug:
-            self.adapter.job.logger.info(f"Deleting Region {area.name}.")
+            self.adapter.job.logger.info(f"Deleting {self.adapter.job.area_loctype.name} {area.name}.")
         self.adapter.objects_to_delete["regions"].append(area)
         return self
 
@@ -79,19 +80,19 @@ def create(cls, adapter, ids, attrs):
         """Create Site in Nautobot from Building object."""
         if adapter.job.debug:
             adapter.job.logger.info(f"Creating Site {ids['name']}.")
-        new_site = Location(
+        new_building = Location(
             name=ids["name"],
-            location_type_id=adapter.locationtype_map["Site"],
-            parent_id=adapter.region_map[attrs["area_parent"]][attrs["area"]],
+            location_type=adapter.job.building_loctype,
+            parent_id=adapter.region_map[attrs["area_parent"]][ids["area"]],
             physical_address=attrs["address"] if attrs.get("address") else "",
             status_id=adapter.status_map["Active"],
             latitude=attrs["latitude"],
             longitude=attrs["longitude"],
         )
         if attrs.get("tenant"):
-            new_site.tenant_id = adapter.tenant_map[attrs["tenant"]]
-        new_site.validated_save()
-        adapter.site_map[ids["name"]] = new_site.id
+            new_building.tenant_id = adapter.tenant_map[attrs["tenant"]]
+        new_building.validated_save()
+        adapter.site_map[ids["name"]] = new_building.id
         return super().create(adapter=adapter, ids=ids, attrs=attrs)
 
     def update(self, attrs):
@@ -129,7 +130,7 @@ def delete(self):
             return None
         site = Location.objects.get(id=self.uuid)
         if self.adapter.job.debug:
-            self.adapter.job.logger.info(f"Deleting Site {site.name}.")
+            self.adapter.job.logger.info(f"Deleting {self.adapter.job.building_loctype.name} {site.name}.")
         self.adapter.objects_to_delete["sites"].append(site)
         return self
 
@@ -141,12 +142,12 @@ class NautobotFloor(base.Floor):
     def create(cls, adapter, ids, attrs):
         """Create LocationType: Floor in Nautobot from Floor object."""
         if adapter.job.debug:
-            adapter.job.logger.info(f"Creating Floor {ids['name']}.")
+            adapter.job.logger.info(f"Creating {adapter.job.floor_loctype.name} {ids['name']}.")
         new_floor = Location(
             name=ids["name"],
             status_id=adapter.status_map["Active"],
             parent_id=adapter.site_map[ids["building"]],
-            location_type_id=adapter.locationtype_map["Floor"],
+            location_type=adapter.job.floor_loctype,
         )
         if attrs.get("tenant"):
             new_floor.tenant_id = adapter.tenant_map[attrs["tenant"]]
@@ -156,9 +157,9 @@ def create(cls, adapter, ids, attrs):
 
     def update(self, attrs):
         """Update LocationType: Floor in Nautobot from Floor object."""
-        floor = Location.objects.get(name=self.name, location_type=LocationType.objects.get(name="Floor"))
+        floor = Location.objects.get(name=self.name, location_type=self.adapter.job.floor_loctype)
         if self.adapter.job.debug:
-            self.adapter.job.logger.info(f"Updating Floor {floor.name} with {attrs}")
+            self.adapter.job.logger.info(f"Updating {self.adapter.job.floor_loctype.name} {floor.name} with {attrs}")
         if "tenant" in attrs:
             if attrs.get("tenant"):
                 floor.tenant_id = self.adapter.tenant_map[attrs["tenant"]]
@@ -176,7 +177,9 @@ def delete(self):
             return None
         floor = Location.objects.get(id=self.uuid)
         if self.adapter.job.debug:
-            self.adapter.job.logger.info(f"Deleting Floor {floor.name} in {floor.parent.name}.")
+            
self.adapter.job.logger.info( + f"Deleting {self.adapter.job.floor_loctype.name} {floor.name} in {floor.parent.name}." + ) self.adapter.objects_to_delete["floors"].append(floor) return self diff --git a/nautobot_ssot/integrations/dna_center/jobs.py b/nautobot_ssot/integrations/dna_center/jobs.py index e52af5189..7767a9ce5 100644 --- a/nautobot_ssot/integrations/dna_center/jobs.py +++ b/nautobot_ssot/integrations/dna_center/jobs.py @@ -2,15 +2,17 @@ from django.templatetags.static import static from django.urls import reverse +from nautobot.apps.jobs import BooleanVar, JSONVar, ObjectVar from nautobot.core.celery import register_jobs -from nautobot.dcim.models import Controller, ControllerManagedDeviceGroup +from nautobot.dcim.models import Controller, LocationType from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot.extras.jobs import BooleanVar, ObjectVar from nautobot.tenancy.models import Tenant +from nautobot_ssot.exceptions import ConfigurationError from nautobot_ssot.integrations.dna_center.diffsync.adapters import dna_center, nautobot from nautobot_ssot.integrations.dna_center.utils.dna_center import DnaCenterClient from nautobot_ssot.jobs.base import DataMapping, DataSource +from nautobot_ssot.utils import verify_controller_managed_device_group name = "DNA Center SSoT" # pylint: disable=invalid-name @@ -25,8 +27,42 @@ class DnaCenterDataSource(DataSource): # pylint: disable=too-many-instance-attr required=True, label="DNA Center Controller", ) + area_loctype = ObjectVar( + model=LocationType, + queryset=LocationType.objects.all(), + display_field="display", + required=True, + label="Area LocationType", + description="LocationType to use for imported DNA Center Areas. Must allow nesting.", + ) + building_loctype = ObjectVar( + model=LocationType, + queryset=LocationType.objects.all(), + display_field="display", + required=True, + label="Building LocationType", + description="LocationType to use for imported DNA Center Buildings.", + ) + floor_loctype = ObjectVar( + model=LocationType, + queryset=LocationType.objects.all(), + display_field="display", + required=True, + label="Floor LocationType", + description="LocationType to use for imported DNA Center Floors.", + ) + location_map = JSONVar( + label="Location Mapping", + required=False, + default={}, + description="Map of information regarding Locations in DNA Center. Ex: {'': {'parent': ''}}", + ) + debug = BooleanVar(description="Enable for more verbose debug logging", default=False) - bulk_import = BooleanVar(description="Perform bulk operations when importing data", default=False) + bulk_import = BooleanVar( + description="Perform bulk operations when importing data. CAUTION! 
Might cause bad data to be pushed to Nautobot.", + default=False, + ) tenant = ObjectVar(model=Tenant, label="Tenant", required=False) class Meta: # pylint: disable=too-few-public-methods @@ -37,6 +73,18 @@ class Meta: # pylint: disable=too-few-public-methods data_target = "Nautobot" description = "Sync information from DNA Center to Nautobot" data_source_icon = static("nautobot_ssot_dna_center/dna_center_logo.png") + has_sensitive_variables = False + field_order = [ + "dryrun", + "bulk_import", + "debug", + "dnac", + "area_loctype", + "building_loctype", + "floor_loctype", + "location_map", + "tenant", + ] def __init__(self): """Initiailize Job vars.""" @@ -60,16 +108,10 @@ def data_mappings(cls): DataMapping("IP Addresses", None, "IP Addresses", reverse("ipam:ipaddress_list")), ) - def get_controller_group(self): - """Method to get or create ControllerManagedDeviceGroup for imported Devices.""" - self.controller_group = ControllerManagedDeviceGroup.objects.update_or_create( - controller=self.dnac, defaults={"name": f"{self.dnac.name} Managed Devices"} - )[0] - def load_source_adapter(self): """Load data from DNA Center into DiffSync models.""" self.logger.info(f"Loading data from {self.dnac.name}") - self.get_controller_group() + verify_controller_managed_device_group(controller=self.dnac) _sg = self.dnac.external_integration.secrets_group username = _sg.get_secret_value( access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP, @@ -99,12 +141,40 @@ def load_target_adapter(self): self.target_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync, tenant=self.tenant) self.target_adapter.load() + def validate_locationtypes(self): + """Validate the LocationTypes specified are related and configured correctly.""" + if not self.area_loctype.nestable: + self.logger.error("Area LocationType is not nestable.") + raise ConfigurationError(f"{self.area_loctype.name} LocationType is not nestable.") + if self.building_loctype.parent != self.area_loctype: + self.logger.error( + "LocationType %s is not the parent of %s LocationType. The Area and Building LocationTypes specified must be related.", + self.area_loctype.name, + self.building_loctype.name, + ) + raise ConfigurationError( + f"{self.area_loctype.name} is not parent to {self.building_loctype.name}. Please correct.", + ) + if self.floor_loctype.parent != self.building_loctype: + self.logger.error( + "LocationType %s is not the parent of %s LocationType. The Building and Floor LocationTypes specified must be related.", + self.building_loctype.name, + self.floor_loctype.name, + ) + raise ConfigurationError( + f"{self.building_loctype.name} is not parent to {self.floor_loctype.name}. 
Please correct.", + ) + def run( self, dryrun, memory_profiling, debug, dnac, + area_loctype, + building_loctype, + floor_loctype, + location_map, bulk_import, tenant, *args, @@ -112,6 +182,11 @@ def run( ): """Perform data synchronization.""" self.dnac = dnac + self.area_loctype = area_loctype + self.building_loctype = building_loctype + self.floor_loctype = floor_loctype + self.validate_locationtypes() + self.location_map = location_map self.tenant = tenant self.debug = debug self.bulk_import = bulk_import diff --git a/nautobot_ssot/integrations/dna_center/signals.py b/nautobot_ssot/integrations/dna_center/signals.py index 8310af49f..cf7643f4c 100644 --- a/nautobot_ssot/integrations/dna_center/signals.py +++ b/nautobot_ssot/integrations/dna_center/signals.py @@ -17,7 +17,6 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa # pylint: disable=invalid-name ContentType = apps.get_model("contenttypes", "ContentType") CustomField = apps.get_model("extras", "CustomField") - LocationType = apps.get_model("dcim", "LocationType") Device = apps.get_model("dcim", "Device") Rack = apps.get_model("dcim", "Rack") RackGroup = apps.get_model("dcim", "RackGroup") @@ -25,12 +24,6 @@ def nautobot_database_ready_callback(sender, *, apps, **kwargs): # pylint: disa IPAddress = apps.get_model("ipam", "IPAddress") Prefix = apps.get_model("ipam", "Prefix") - region = LocationType.objects.update_or_create(name="Region", defaults={"nestable": True})[0] - site = LocationType.objects.update_or_create(name="Site", defaults={"nestable": False, "parent": region})[0] - site.content_types.add(ContentType.objects.get_for_model(Device)) - floor = LocationType.objects.update_or_create(name="Floor", defaults={"nestable": False, "parent": site})[0] - floor.content_types.add(ContentType.objects.get_for_model(Device)) - ver_dict = { "key": "os_version", "type": CustomFieldTypeChoices.TYPE_TEXT, diff --git a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py index 75368306b..29f20010e 100644 --- a/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py +++ b/nautobot_ssot/integrations/infoblox/diffsync/adapters/infoblox.py @@ -8,6 +8,7 @@ from diffsync.exceptions import ObjectAlreadyExists from nautobot.extras.plugins.exceptions import PluginImproperlyConfigured +from nautobot_ssot.exceptions import AdapterLoadException from nautobot_ssot.integrations.infoblox.choices import FixedAddressTypeChoices from nautobot_ssot.integrations.infoblox.diffsync.models.infoblox import ( InfobloxDnsARecord, @@ -27,10 +28,6 @@ ) -class AdapterLoadException(Exception): - """Raised when there's an error while loading data.""" - - class InfobloxAdapter(Adapter): """DiffSync adapter using requests to communicate to Infoblox server.""" diff --git a/nautobot_ssot/integrations/infoblox/utils/client.py b/nautobot_ssot/integrations/infoblox/utils/client.py index 6f43442f5..d6ade64c3 100644 --- a/nautobot_ssot/integrations/infoblox/utils/client.py +++ b/nautobot_ssot/integrations/infoblox/utils/client.py @@ -17,6 +17,7 @@ from requests.compat import urljoin from requests.exceptions import HTTPError +from nautobot_ssot.exceptions import InvalidUrlScheme from nautobot_ssot.integrations.infoblox.utils.diffsync import get_ext_attr_dict logger = logging.getLogger("nautobot.ssot.infoblox") @@ -86,19 +87,6 @@ def get_dns_name(possible_fqdn: str) -> str: return dns_name -class InvalidUrlScheme(Exception): - """Exception raised for wrong 
scheme being passed for URL. - - Attributes: - message (str): Returned explanation of Error. - """ - - def __init__(self, scheme): - """Initialize Exception with wrong scheme in message.""" - self.message = f"Invalid URL scheme '{scheme}' found for Infoblox URL. Please correct to use HTTPS." - super().__init__(self.message) - - class InfobloxApi: # pylint: disable=too-many-public-methods, too-many-instance-attributes """Representation and methods for interacting with Infoblox.""" diff --git a/nautobot_ssot/integrations/ipfabric/diffsync/adapter_ipfabric.py b/nautobot_ssot/integrations/ipfabric/diffsync/adapter_ipfabric.py index e701060df..9a1e1dccf 100644 --- a/nautobot_ssot/integrations/ipfabric/diffsync/adapter_ipfabric.py +++ b/nautobot_ssot/integrations/ipfabric/diffsync/adapter_ipfabric.py @@ -120,7 +120,8 @@ def load(self): # pylint: disable=too-many-locals,too-many-statements filters={"net": ["empty", False], "siteName": ["empty", False]}, columns=["net", "siteName"], ): - networks[network["siteName"]].append(ipaddress.ip_network(network["net"])) + # IPF bug NIM-15635 Fix Version 7.0: 'net' column has host bits set. + networks[network["siteName"]].append(ipaddress.ip_network(network["net"], strict=False)) for location in self.get_all(self.location): if location.name is None: continue diff --git a/nautobot_ssot/integrations/meraki/__init__.py b/nautobot_ssot/integrations/meraki/__init__.py new file mode 100644 index 000000000..e67c01b6f --- /dev/null +++ b/nautobot_ssot/integrations/meraki/__init__.py @@ -0,0 +1 @@ +"""Base module for Meraki integration.""" diff --git a/nautobot_ssot/integrations/meraki/diffsync/__init__.py b/nautobot_ssot/integrations/meraki/diffsync/__init__.py new file mode 100644 index 000000000..4a2396bec --- /dev/null +++ b/nautobot_ssot/integrations/meraki/diffsync/__init__.py @@ -0,0 +1 @@ +"""DiffSync models and adapters for Meraki SSoT.""" diff --git a/nautobot_ssot/integrations/meraki/diffsync/adapters/__init__.py b/nautobot_ssot/integrations/meraki/diffsync/adapters/__init__.py new file mode 100644 index 000000000..35a90f04b --- /dev/null +++ b/nautobot_ssot/integrations/meraki/diffsync/adapters/__init__.py @@ -0,0 +1 @@ +"""Adapter classes for loading DiffSyncModels with data from Meraki or Nautobot.""" diff --git a/nautobot_ssot/integrations/meraki/diffsync/adapters/meraki.py b/nautobot_ssot/integrations/meraki/diffsync/adapters/meraki.py new file mode 100644 index 000000000..5c1205f6e --- /dev/null +++ b/nautobot_ssot/integrations/meraki/diffsync/adapters/meraki.py @@ -0,0 +1,371 @@ +"""Nautobot SSoT for Meraki Adapter for Meraki SSoT plugin.""" + +from diffsync import Adapter, DiffSyncModel +from diffsync.exceptions import ObjectNotFound +from netutils.ip import ipaddress_interface, netmask_to_cidr + +from nautobot_ssot.exceptions import JobException +from nautobot_ssot.integrations.meraki.diffsync.models.meraki import ( + MerakiDevice, + MerakiHardware, + MerakiIPAddress, + MerakiIPAssignment, + MerakiNetwork, + MerakiOSVersion, + MerakiPort, + MerakiPrefix, +) +from nautobot_ssot.integrations.meraki.utils.meraki import get_role_from_devicetype, parse_hostname_for_role + + +class MerakiAdapter(Adapter): + """DiffSync adapter for Meraki.""" + + network = MerakiNetwork + hardware = MerakiHardware + osversion = MerakiOSVersion + device = MerakiDevice + port = MerakiPort + prefix = MerakiPrefix + ipaddress = MerakiIPAddress + ipassignment = MerakiIPAssignment + + top_level = ["network", "hardware", "osversion", "device", "prefix", "ipaddress", 
"ipassignment"] + + def __init__(self, job, sync, client, tenant=None): + """Initialize Meraki. + + Args: + job (object): Meraki SSoT job. + sync (object): Meraki DiffSync. + client (object): Meraki API client connection object. + tenant (object): Tenant specified in Job form to attach to imported Devices. + """ + super().__init__() + self.job = job + self.sync = sync + self.conn = client + self.tenant = tenant + self.device_map = {} + self.org_uplink_statuses = self.conn.get_org_uplink_statuses() + + def load_networks(self): + """Load networks from Meraki dashboard into DiffSync models.""" + for net in self.conn.get_org_networks(): + parent_name = None + if self.job.network_loctype.parent: + if self.job.parent_location: + parent_name = self.job.parent_location.name + elif self.job.location_map and net in self.job.location_map: + parent_name = self.job.location_map[net]["parent"] + else: + self.job.logger.error( + f"Parent Location is required for {self.job.network_loctype.name} but can't determine parent to be assigned to {net}." + ) + continue + self.get_or_instantiate( + self.network, + ids={"name": net["name"], "parent": parent_name}, + attrs={ + "timezone": net["timeZone"], + "notes": net["notes"].rstrip() if net.get("notes") else "", + "tags": net["tags"], + "tenant": self.tenant.name if self.tenant else None, + "uuid": None, + }, + ) + + def load_devices(self): # pylint: disable=too-many-branches + """Load devices from Meraki dashboard into DiffSync models.""" + self.device_map = {dev["name"]: dev for dev in self.conn.get_org_devices()} + statuses = self.conn.get_org_device_statuses() + status = "Offline" + for dev in self.device_map.values(): + if dev.get("name"): + if dev["name"] in statuses: + if statuses[dev["name"]] == "online": + status = "Active" + try: + self.get(self.device, dev["name"]) + self.job.logger.warning(f"Duplicate device {dev['name']} found and being skipped.") + except ObjectNotFound: + if self.job.hostname_mapping and len(self.job.hostname_mapping) > 0: + if self.job.debug: + self.job.logger.debug(f"Parsing hostname for device {dev['name']} to determine role.") + role = parse_hostname_for_role(dev_hostname=dev["name"], hostname_map=self.job.hostname_mapping) + elif self.job.devicetype_mapping and len(self.job.devicetype_mapping) > 0: + if self.job.debug: + self.job.logger.debug(f"Parsing device model for device {dev['name']} to determine role.") + role = get_role_from_devicetype( + dev_model=dev["model"], devicetype_map=self.job.devicetype_mapping + ) + else: + role = "Unknown" + self.load_hardware_model(device_info=dev) + self.get_or_instantiate(self.osversion, ids={"version": dev["firmware"]}) + new_dev, loaded = self.get_or_instantiate( + self.device, + ids={"name": dev["name"]}, + attrs={ + "controller_group": self.job.instance.controller_managed_device_groups.first().name + if self.job.instance.controller_managed_device_groups.count() != 0 + else "", + "notes": dev["notes"].rstrip(), + "serial": dev["serial"], + "status": status, + "role": role, + "model": dev["model"], + "network": self.conn.network_map[dev["networkId"]]["name"], + "tenant": self.tenant.name if self.tenant else None, + "uuid": None, + "version": dev["firmware"], + }, + ) + if loaded: + if dev["model"].startswith(("MX", "MG", "Z")): + self.load_firewall_ports(device=new_dev, serial=dev["serial"], network_id=dev["networkId"]) + if dev["model"].startswith("MS"): + self.load_switch_ports(device=new_dev, serial=dev["serial"]) + if dev["model"].startswith("MR"): + 
self.load_ap_ports(device=new_dev, serial=dev["serial"]) + else: + self.job.logger.warning(f"Device serial {dev['serial']} is missing hostname so will be skipped.") + + def load_hardware_model(self, device_info: dict): + """Load hardware model from device information.""" + try: + self.get(self.hardware, device_info["model"]) + except ObjectNotFound: + new_hardware = self.hardware( + model=device_info["model"], + uuid=None, + ) + self.add(new_hardware) + + def load_firewall_ports(self, device: DiffSyncModel, serial: str, network_id: str): # pylint: disable=too-many-locals + """Load ports of a firewall, cellular, or teleworker device from Meraki dashboard into DiffSync models.""" + mgmt_ports = self.conn.get_management_ports(serial=serial) + uplink_settings = self.conn.get_uplink_settings(serial=serial) + lan_ports = self.conn.get_appliance_switchports(network_id=network_id) + + # keep track of whether a primary IP has already been found since we can only assign one + primary_found = False + for port in mgmt_ports.keys(): + uplink_status = "Planned" + if serial in self.org_uplink_statuses: + uplinks = self.org_uplink_statuses[serial]["uplinks"] + for link in uplinks: + if link["interface"] == port and link["status"] == "active": + uplink_status = "Active" + port_uplink_settings = uplink_settings[port] + new_port, loaded = self.get_or_instantiate( + self.port, + ids={"name": port, "device": device.name}, + attrs={ + "management": True, + "enabled": port_uplink_settings["enabled"], + "port_type": "1000base-t", + "port_status": uplink_status, + "tagging": port_uplink_settings["vlanTagging"]["enabled"], + "uuid": None, + }, + ) + if loaded: + self.add(new_port) + device.add_child(new_port) + if port_uplink_settings["svis"]["ipv4"]["assignmentMode"] == "static": + port_svis = port_uplink_settings["svis"]["ipv4"] + prefix = ipaddress_interface(ip=port_svis["address"], attr="network.with_prefixlen") + self.load_prefix( + location=self.conn.network_map[network_id]["name"], + prefix=prefix, + ) + self.load_ipaddress( + address=port_svis["address"], + prefix=prefix, + ) + self.load_ipassignment( + address=port_svis["address"], + dev_name=device.name, + port=port, + primary=bool(uplink_status == "Active" and not primary_found), + ) + if uplink_status == "Active": + primary_found = True + if lan_ports: + self.process_lan_ports(device, lan_ports) + + def process_lan_ports(self, device: DiffSyncModel, lan_ports: dict): + """Load the switchports for a Device into DiffSync models. + + Args: + device (DiffSyncModel): Loaded Device DiffSyncModel to associate with Port to be loaded. + lan_ports (dict): Dictionary of switchport data. 
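+
+        Note:
+            Each switchport is loaded as a non-management port; tagging is enabled only
+            when the port type is "trunk".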
+ """ + for port in lan_ports: + new_port, loaded = self.get_or_instantiate( + self.port, + ids={"name": str(port["number"]), "device": device.name}, + attrs={ + "management": False, + "enabled": port["enabled"], + "port_type": "1000base-t", + "port_status": "Active", + "tagging": bool(port["type"] == "trunk"), + "uuid": None, + }, + ) + if loaded: + self.add(new_port) + device.add_child(new_port) + + def load_switch_ports(self, device: DiffSyncModel, serial: str): + """Load ports of a switch device from Meraki dashboard into DiffSync models.""" + mgmt_ports = self.conn.get_management_ports(serial=serial) + org_switchports = self.conn.get_org_switchports() + + for port in mgmt_ports.keys(): + try: + self.get(self.port, {"name": port, "device": device.name}) + except ObjectNotFound: + mgmt_port = self.port( + name=port, + device=device.name, + management=True, + enabled=True, + port_type="1000base-t", + port_status="Active", + tagging=False, + uuid=None, + ) + self.add(mgmt_port) + device.add_child(mgmt_port) + if mgmt_ports[port].get("usingStaticIp"): + prefix = ipaddress_interface( + ip=f"{mgmt_ports[port]['staticIp']}/{netmask_to_cidr(netmask=mgmt_ports[port]['staticSubnetMask'])}", + attr="network.with_prefixlen", + ) + self.load_prefix( + location=self.conn.network_map[self.device_map[device.name]["networkId"]]["name"], + prefix=prefix, + ) + self.load_ipaddress( + address=f"{mgmt_ports[port]['staticIp']}/{netmask_to_cidr(mgmt_ports[port]['staticSubnetMask'])}", + prefix=prefix, + ) + self.load_ipassignment( + address=f"{mgmt_ports[port]['staticIp']}/{netmask_to_cidr(mgmt_ports[port]['staticSubnetMask'])}", + dev_name=device.name, + port=port, + primary=True, + ) + if serial in org_switchports: + for port in org_switchports[serial]["ports"]: + new_port = self.port( + name=port["portId"], + device=device.name, + management=False, + enabled=port["enabled"], + port_type="1000base-t", + port_status="Active", + tagging=bool(port["type"] == "trunk"), + uuid=None, + ) + self.add(new_port) + device.add_child(new_port) + + def load_ap_ports(self, device: DiffSyncModel, serial: str): + """Load ports of a MR device from Meraki dashboard into DiffSync models.""" + mgmt_ports = self.conn.get_management_ports(serial=serial) + + for port in mgmt_ports.keys(): + try: + self.get(self.port, {"name": port, "device": device.name}) + except ObjectNotFound: + new_port = self.port( + name=port, + device=device.name, + management=True, + enabled=True, + port_type="1000base-t", + port_status="Active", + tagging=False, + uuid=None, + ) + self.add(new_port) + device.add_child(new_port) + if mgmt_ports[port].get("usingStaticIp"): + prefix = ipaddress_interface( + ip=f"{mgmt_ports[port]['staticIp']}/{netmask_to_cidr(netmask=mgmt_ports[port]['staticSubnetMask'])}", + attr="network.with_prefixlen", + ) + self.load_prefix( + location=self.conn.network_map[self.device_map[device.name]["networkId"]]["name"], + prefix=prefix, + ) + self.load_ipaddress( + address=f"{mgmt_ports[port]['staticIp']}/{netmask_to_cidr(mgmt_ports[port]['staticSubnetMask'])}", + prefix=prefix, + ) + self.load_ipassignment( + address=f"{mgmt_ports[port]['staticIp']}/{netmask_to_cidr(mgmt_ports[port]['staticSubnetMask'])}", + dev_name=device.name, + port=port, + primary=True, + ) + + def load_prefix(self, location: str, prefix: str): + """Load Prefixes of devices into DiffSync models.""" + if self.tenant: + namespace = self.tenant.name + else: + namespace = "Global" + try: + self.get(self.prefix, {"prefix": prefix, "namespace": namespace}) + 
+
+    def load_prefix(self, location: str, prefix: str):
+        """Load Prefixes of devices into DiffSync models."""
+        if self.tenant:
+            namespace = self.tenant.name
+        else:
+            namespace = "Global"
+        try:
+            self.get(self.prefix, {"prefix": prefix, "namespace": namespace})
+        except ObjectNotFound:
+            new_pf = self.prefix(
+                prefix=prefix,
+                location=location,
+                namespace=namespace,
+                tenant=self.tenant.name if self.tenant else None,
+                uuid=None,
+            )
+            self.add(new_pf)
+
+    def load_ipaddress(self, address: str, prefix: str):
+        """Load IPAddresses of devices into DiffSync models."""
+        try:
+            self.get(self.ipaddress, {"address": address, "prefix": prefix})
+        except ObjectNotFound:
+            new_ip = self.ipaddress(
+                address=address,
+                prefix=prefix,
+                tenant=self.tenant.name if self.tenant else None,
+                uuid=None,
+            )
+            self.add(new_ip)
+
+    def load_ipassignment(self, address: str, dev_name: str, port: str, primary: bool):
+        """Load IPAddressesToInterface of devices into DiffSync models."""
+        namespace = self.tenant.name if self.tenant else "Global"
+        try:
+            self.get(self.ipassignment, {"address": address, "device": dev_name, "namespace": namespace, "port": port})
+        except ObjectNotFound:
+            new_map = self.ipassignment(
+                address=address,
+                namespace=namespace,
+                device=dev_name,
+                port=port,
+                primary=primary,
+                uuid=None,
+            )
+            self.add(new_map)
+
+    def load(self):
+        """Load data from Meraki into DiffSync models."""
+        if self.conn.validate_organization_exists():
+            self.load_networks()
+            self.load_devices()
+        else:
+            self.job.logger.error("Specified organization ID not found in Meraki dashboard.")
+            raise JobException("Incorrect Organization ID specified.")
diff --git a/nautobot_ssot/integrations/meraki/diffsync/adapters/nautobot.py b/nautobot_ssot/integrations/meraki/diffsync/adapters/nautobot.py
new file mode 100644
index 000000000..3af64c44a
--- /dev/null
+++ b/nautobot_ssot/integrations/meraki/diffsync/adapters/nautobot.py
@@ -0,0 +1,362 @@
+"""Nautobot Adapter for Meraki SSoT plugin."""
+
+from collections import defaultdict
+from typing import Optional
+
+from diffsync import Adapter
+from diffsync.enum import DiffSyncModelFlags
+from diffsync.exceptions import ObjectNotFound
+from django.contrib.contenttypes.models import ContentType
+from django.db.models import ProtectedError
+from nautobot.dcim.models import (
+    Device,
+    DeviceType,
+    Interface,
+    Location,
+    LocationType,
+    Manufacturer,
+    Platform,
+    SoftwareVersion,
+)
+from nautobot.extras.models import Note, Role, Status
+from nautobot.ipam.models import IPAddress, IPAddressToInterface, Namespace, Prefix
+from nautobot.tenancy.models import Tenant
+
+from nautobot_ssot.integrations.meraki.diffsync.models.nautobot import (
+    NautobotDevice,
+    NautobotHardware,
+    NautobotIPAddress,
+    NautobotIPAssignment,
+    NautobotNetwork,
+    NautobotOSVersion,
+    NautobotPort,
+    NautobotPrefix,
+)
+from nautobot_ssot.integrations.meraki.utils.nautobot import get_tag_strings
+
+
+class NautobotAdapter(Adapter):  # pylint: disable=too-many-instance-attributes
+    """DiffSync adapter for Nautobot."""
+
+    network = NautobotNetwork
+    hardware = NautobotHardware
+    osversion = NautobotOSVersion
+    device = NautobotDevice
+    port = NautobotPort
+    prefix = NautobotPrefix
+    ipaddress = NautobotIPAddress
+    ipassignment = NautobotIPAssignment
+
+    top_level = ["network", "hardware", "osversion", "device", "prefix", "ipaddress", "ipassignment"]
+
+    status_map = {}
+    tenant_map = {}
+    locationtype_map = {}
+    region_map = {}
+    site_map = {}
+    platform_map = {}
+    manufacturer_map = {}
+    devicerole_map = {}
+    devicetype_map = {}
+    device_map = {}
+    port_map = {}
+    namespace_map = {}
+    prefix_map = {}
+    ipaddr_map = {}
+    contenttype_map = {}
+    version_map = {}
+
+    def __init__(self, job, sync=None, tenant: Optional[Tenant] = None):
+        """Initialize Nautobot.
+
+        Args:
+            job (object): Nautobot job.
+            sync (object, optional): Nautobot DiffSync. Defaults to None.
+            tenant (Tenant, optional): Nautobot Tenant to assign to loaded objects. Defaults to None.
+        """
+        super().__init__()
+        self.job = job
+        self.sync = sync
+        self.tenant = tenant
+        self.objects_to_create = defaultdict(list)
+        self.objects_to_delete = defaultdict(list)
+
+    def load_sites(self):
+        """Load Site data from Nautobot into DiffSync model."""
+        for site in Location.objects.filter(location_type=self.job.network_loctype):
+            self.site_map[site.name] = site
+            new_site, _ = self.get_or_instantiate(
+                self.network,
+                ids={"name": site.name, "parent": site.parent.name if site.parent else None},
+                attrs={
+                    "notes": "",
+                    "tags": get_tag_strings(list_tags=site.tags),
+                    "timezone": str(site.time_zone) if site.time_zone else None,
+                    "tenant": site.tenant.name if site.tenant else None,
+                    "uuid": site.id,
+                },
+            )
+            if site.notes:
+                note = site.notes.last()
+                new_site.notes = note.note.rstrip()
+
+    def load_devicetypes(self):
+        """Load DeviceType data from Nautobot into DiffSync model."""
+        for devtype in DeviceType.objects.filter(manufacturer__name="Cisco Meraki"):
+            try:
+                self.get(self.hardware, devtype.model)
+            except ObjectNotFound:
+                new_dt = self.hardware(model=devtype.model, uuid=devtype.id)
+                if self.tenant:
+                    new_dt.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+                self.add(new_dt)
+                self.devicetype_map[devtype.model] = devtype.id
+
+    def load_softwareversions(self):
+        """Load SoftwareVersion data from Nautobot into DiffSync model."""
+        for ver in SoftwareVersion.objects.filter(platform__name="Cisco Meraki"):
+            self.get_or_instantiate(self.osversion, ids={"version": ver.version}, attrs={"uuid": ver.id})
+            self.version_map[ver.version] = ver.id
+
+    def load_devices(self):
+        """Load Device data from Nautobot into DiffSync model."""
+        if self.tenant:
+            devices = Device.objects.filter(tenant=self.tenant)
+        else:
+            devices = Device.objects.filter(_custom_field_data__system_of_record="Meraki SSoT")
+        for dev in devices:
+            try:
+                self.get(self.device, dev.name)
+            except ObjectNotFound:
+                self.device_map[dev.name] = dev.id
+                self.port_map[dev.name] = {}
+                new_dev = self.device(
+                    name=dev.name,
+                    controller_group=dev.controller_managed_device_group.name
+                    if dev.controller_managed_device_group
+                    else None,
+                    serial=dev.serial,
+                    status=dev.status.name,
+                    role=dev.role.name,
+                    model=dev.device_type.model,
+                    notes="",
+                    network=dev.location.name,
+                    tenant=dev.tenant.name if dev.tenant else None,
+                    uuid=dev.id,
+                    version=dev.software_version.version if dev.software_version else None,
+                )
+                if dev.notes:
+                    note = dev.notes.last()
+                    new_dev.notes = note.note.rstrip()
+                if self.tenant:
+                    new_dev.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+                self.add(new_dev)
+
+    def load_ports(self):
+        """Load Port data from Nautobot into DiffSync model."""
+        if self.tenant:
+            ports = Interface.objects.filter(device__tenant=self.tenant)
+        else:
+            ports = Interface.objects.filter(_custom_field_data__system_of_record="Meraki SSoT")
+        for intf in ports:
+            try:
+                self.get(self.port, {"name": intf.name, "device": intf.device.name})
+            except ObjectNotFound:
+                self.port_map[intf.device.name][intf.name] = intf.id
+                new_port = self.port(
+                    name=intf.name,
+                    device=intf.device.name,
+                    management=intf.mgmt_only,
+                    enabled=intf.enabled,
+                    port_type=intf.type,
+                    port_status=intf.status.name,
+                    tagging=bool(intf.mode != "access"),
+                    uuid=intf.id,
+                )
+                if self.tenant:
+                    new_port.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+                self.add(new_port)
+                dev = self.get(self.device, intf.device.name)
+                dev.add_child(new_port)
+
+    def load_prefixes(self):
+        """Load Prefixes from Nautobot into DiffSync models."""
+        if self.tenant:
+            prefixes = Prefix.objects.filter(tenant=self.tenant)
+        else:
+            prefixes = Prefix.objects.filter(_custom_field_data__system_of_record="Meraki SSoT")
+        for prefix in prefixes:
+            new_pf = self.prefix(
+                prefix=str(prefix.prefix),
+                location=prefix.location.name if prefix.location else "",
+                namespace=prefix.namespace.name,
+                tenant=prefix.tenant.name if prefix.tenant else None,
+                uuid=prefix.id,
+            )
+            if self.tenant:
+                new_pf.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+            self.add(new_pf)
+            self.prefix_map[str(prefix.prefix)] = prefix.id
+
+    def load_ipaddresses(self):
+        """Load IPAddresses from Nautobot into DiffSync models."""
+        if self.tenant:
+            addresses = IPAddress.objects.filter(tenant=self.tenant)
+        else:
+            addresses = IPAddress.objects.filter(_custom_field_data__system_of_record="Meraki SSoT")
+        for ipaddr in addresses:
+            if str(ipaddr.parent.namespace) not in self.ipaddr_map:
+                self.ipaddr_map[str(ipaddr.parent.namespace)] = {}
+            self.ipaddr_map[str(ipaddr.parent.namespace)][str(ipaddr.address)] = ipaddr.id
+            new_ip = self.ipaddress(
+                address=str(ipaddr.address),
+                prefix=str(ipaddr.parent.prefix) if ipaddr.parent else "",
+                tenant=ipaddr.tenant.name if ipaddr.tenant else None,
+                uuid=ipaddr.id,
+            )
+            if self.tenant:
+                new_ip.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+            self.add(new_ip)
+
+    def load_ipassignments(self):
+        """Load IPAddressToInterface from Nautobot into DiffSync models."""
+        if self.tenant:
+            mappings = IPAddressToInterface.objects.filter(ip_address__tenant=self.tenant)
+        else:
+            mappings = IPAddressToInterface.objects.filter(
+                ip_address___custom_field_data__system_of_record="Meraki SSoT"
+            )
+        for ipassignment in mappings:
+            new_map = self.ipassignment(
+                address=str(ipassignment.ip_address.address),
+                namespace=ipassignment.ip_address.parent.namespace.name,
+                device=ipassignment.interface.device.name,
+                port=ipassignment.interface.name,
+                primary=len(ipassignment.ip_address.primary_ip4_for.all()) > 0
+                or len(ipassignment.ip_address.primary_ip6_for.all()) > 0,
+                uuid=ipassignment.id,
+            )
+            if self.tenant:
+                new_map.model_flags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+            self.add(new_map)
+
+    def sync_complete(self, source: Adapter, *args, **kwargs):
+        """Clean up function for DiffSync sync.
+
+        Once the sync is complete, this function runs deleting any objects
+        from Nautobot that need to be deleted in a specific order.
+
+        Args:
+            source (Adapter): Source DiffSync Adapter.
+            args (dict): Positional arguments.
+            kwargs (dict): Keyword arguments.
+        """
+        for grouping in (
+            "ipaddrs",
+            "prefixes",
+            "ports",
+            "devices",
+            "devicetypes",
+        ):
+            for nautobot_object in self.objects_to_delete[grouping]:
+                try:
+                    if self.job.debug:
+                        self.job.logger.info(f"Deleting {nautobot_object}.")
+                    nautobot_object.delete()
+                except ProtectedError:
+                    self.job.logger.warning(f"Deletion failed for protected object: {nautobot_object}")
+            self.objects_to_delete[grouping] = []
+
+        self.process_objects_to_create()
+        return super().sync_complete(source, *args, **kwargs)
+
+    def process_objects_to_create(self):  # pylint: disable=too-many-branches
+        """Process all of the objects that have been added to the objects_to_create dictionary."""
+        if len(self.objects_to_create["devicetypes"]) > 0:
+            self.job.logger.info("Performing bulk create of DeviceTypes in Nautobot")
+            DeviceType.objects.bulk_create(self.objects_to_create["devicetypes"], batch_size=250)
+        if len(self.objects_to_create["devices"]) > 0:
+            self.job.logger.info("Performing bulk create of Devices in Nautobot")
+            Device.objects.bulk_create(self.objects_to_create["devices"], batch_size=250)
+        if len(self.objects_to_create["ports"]) > 0:
+            self.job.logger.info("Performing bulk create of Interfaces in Nautobot")
+            Interface.objects.bulk_create(self.objects_to_create["ports"], batch_size=250)
+        if len(self.objects_to_create["prefixes"]) > 0:
+            self.job.logger.info("Performing bulk create of Prefixes in Nautobot")
+            Prefix.objects.bulk_create(self.objects_to_create["prefixes"], batch_size=250)
+        if len(self.objects_to_create["prefix_locs"]) > 0:
+            self.job.logger.info("Performing assignment of Locations to Prefixes in Nautobot")
+            for pair in self.objects_to_create["prefix_locs"]:
+                update_pf = Prefix.objects.get(id=pair[0])
+                update_pf.locations.add(pair[1])
+        if len(self.objects_to_create["ipaddrs"]) > 0:
+            self.job.logger.info("Performing bulk create of IP Addresses in Nautobot")
+            IPAddress.objects.bulk_create(self.objects_to_create["ipaddrs"], batch_size=250)
+        if len(self.objects_to_create["ipaddrs-to-prefixes"]) > 0:
+            self.job.logger.info("Assigning parent Prefix to IPAddresses with bulk_update.")
+            assigned_parents = []
+            for pair in self.objects_to_create["ipaddrs-to-prefixes"]:
+                ipaddr = pair[0]
+                ipaddr.parent_id = pair[1]
+                assigned_parents.append(ipaddr)
+            IPAddress.objects.bulk_update(assigned_parents, ["parent_id"], batch_size=250)
+        if len(self.objects_to_create["ipaddrs-to-intfs"]) > 0:
+            self.job.logger.info("Performing assignment of IPAddress to Port.")
+            IPAddressToInterface.objects.bulk_create(self.objects_to_create["ipaddrs-to-intfs"], batch_size=250)
+        if len(self.objects_to_create["device_primary_ip4"]) > 0:
+            self.job.logger.info("Performing bulk update of IPv4 addresses in Nautobot.")
+            device_primary_ip_objs = []
+            for devip in self.objects_to_create["device_primary_ip4"]:
+                dev = Device.objects.get(id=devip[0])
+                dev.primary_ip4_id = devip[1]
+                device_primary_ip_objs.append(dev)
+            Device.objects.bulk_update(device_primary_ip_objs, ["primary_ip4_id"], batch_size=250)
+        if len(self.objects_to_create["device_primary_ip6"]) > 0:
+            self.job.logger.info("Performing bulk update of IPv6 addresses in Nautobot.")
+            device_primary_ip_objs = []
+            for devip in self.objects_to_create["device_primary_ip6"]:
+                dev = Device.objects.get(id=devip[0])
+                dev.primary_ip6_id = devip[1]
+                device_primary_ip_objs.append(dev)
+            Device.objects.bulk_update(device_primary_ip_objs, ["primary_ip6_id"], batch_size=250)
+        if len(self.objects_to_create["notes"]) > 0:
+            self.job.logger.info("Performing bulk create of Notes in Nautobot")
+            Note.objects.bulk_create(self.objects_to_create["notes"], batch_size=250)
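The deletion and creation orders above are mirror images for a reason: ForeignKeys must always point at rows that exist, so creates run parent-first and deletes run child-first. A minimal illustration of that contract, with toy data:

```python
# Toy data only; the real groupings hold unsaved Nautobot model instances.
CREATE_ORDER = ["devicetypes", "devices", "ports", "prefixes", "ipaddrs"]
DELETE_ORDER = list(reversed(CREATE_ORDER))

objects_to_create = {"devicetypes": ["MX84"], "devices": ["rtr1"], "ports": ["wan1"]}

for grouping in CREATE_ORDER:
    for obj in objects_to_create.get(grouping, []):
        print(f"bulk_create {grouping}: {obj}")  # parents exist before children
```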
+
+    def load(self):
+        """Load data from Nautobot into DiffSync models."""
+        if self.job.tenant:
+            Namespace.objects.get_or_create(name=self.job.tenant.name)
+        if self.job.hostname_mapping and len(self.job.hostname_mapping) > 0:
+            for mapping in self.job.hostname_mapping:
+                new_role, _ = Role.objects.get_or_create(name=mapping[1])
+                new_role.content_types.add(ContentType.objects.get_for_model(Device))
+        if self.job.devicetype_mapping and len(self.job.devicetype_mapping) > 0:
+            for mapping in self.job.devicetype_mapping:
+                new_role, _ = Role.objects.get_or_create(name=mapping[1])
+                new_role.content_types.add(ContentType.objects.get_for_model(Device))
+        self.status_map = {s.name: s.id for s in Status.objects.only("id", "name")}
+        self.locationtype_map = {lt.name: lt.id for lt in LocationType.objects.only("id", "name")}
+        self.platform_map = {p.name: p.id for p in Platform.objects.only("id", "name")}
+        self.manufacturer_map = {m.name: m.id for m in Manufacturer.objects.only("id", "name")}
+        self.devicerole_map = {d.name: d.id for d in Role.objects.only("id", "name")}
+        self.namespace_map = {ns.name: ns.id for ns in Namespace.objects.only("id", "name")}
+        self.contenttype_map = {c.model: c.id for c in ContentType.objects.only("id", "model")}
+
+        if self.job.parent_location:
+            self.region_map[self.job.parent_location.name] = self.job.parent_location.id
+        else:
+            self.region_map = {
+                loc_data["parent"]: Location.objects.get(name=loc_data["parent"]).id
+                for _, loc_data in self.job.location_map.items()
+            }
+        self.tenant_map = {t.name: t.id for t in Tenant.objects.only("id", "name")}
+
+        self.load_sites()
+        self.load_devicetypes()
+        self.load_softwareversions()
+        self.load_devices()
+        self.load_ports()
+        self.load_prefixes()
+        self.load_ipaddresses()
+        self.load_ipassignments()
diff --git a/nautobot_ssot/integrations/meraki/diffsync/models/__init__.py b/nautobot_ssot/integrations/meraki/diffsync/models/__init__.py
new file mode 100644
index 000000000..e27fdbb6c
--- /dev/null
+++ b/nautobot_ssot/integrations/meraki/diffsync/models/__init__.py
@@ -0,0 +1 @@
+"""DiffSync models and adapters for the Meraki SSoT app."""
diff --git a/nautobot_ssot/integrations/meraki/diffsync/models/base.py b/nautobot_ssot/integrations/meraki/diffsync/models/base.py
new file mode 100644
index 000000000..a8aeedc64
--- /dev/null
+++ b/nautobot_ssot/integrations/meraki/diffsync/models/base.py
@@ -0,0 +1,146 @@
+"""DiffSyncModel subclasses for Nautobot-to-Meraki data sync."""
+
+from typing import List, Optional
+from uuid import UUID
+
+from diffsync import DiffSyncModel
+from diffsync.enum import DiffSyncModelFlags
+
+
+class Network(DiffSyncModel):
+    """DiffSync model for Meraki networks."""
+
+    model_flags: DiffSyncModelFlags = DiffSyncModelFlags.SKIP_UNMATCHED_DST
+
+    _modelname = "network"
+    _identifiers = (
+        "name",
+        "parent",
+    )
+    _attributes = ("timezone", "notes", "tags", "tenant")
+    _children = {}
+
+    name: str
+    parent: Optional[str] = None
+    timezone: Optional[str] = None
+    notes: Optional[str] = None
+    tags: Optional[List[str]] = None
+    tenant: Optional[str] = None
+
+    uuid: Optional[UUID] = None
+
+
+class Hardware(DiffSyncModel):
+    """DiffSync model for Meraki models."""
+
+    _modelname = "hardware"
+    _identifiers = ("model",)
+    _attributes = ()
+    _children = {}
+
+    model: str
+
+    uuid: Optional[UUID] = None
+
+
+class OSVersion(DiffSyncModel):
+    """DiffSync model for Meraki device software versions."""
+
+    _modelname = "osversion"
+    _identifiers = ("version",)
+    _attributes = ()
+    _children = {}
+
+    version: str
+
+    uuid: Optional[UUID] = None
+
+
+class Device(DiffSyncModel):
+    """DiffSync model for Meraki devices."""
+
+    _modelname = "device"
+    _identifiers = ("name",)
+    _attributes = ("controller_group", "notes", "serial", "status", "role", "model", "network", "tenant", "version")
+    _children = {"port": "ports"}
+
+    name: str
+    controller_group: Optional[str] = None
+    notes: Optional[str] = None
+    serial: Optional[str] = None
+    status: Optional[str] = None
+    role: Optional[str] = None
+    model: Optional[str] = None
+    network: str
+    tenant: Optional[str] = None
+    version: Optional[str] = None
+    ports: List["Port"] = []
+
+    uuid: Optional[UUID] = None
+
+
+class Port(DiffSyncModel):
+    """DiffSync model for Meraki device ports."""
+
+    _modelname = "port"
+    _identifiers = ("name", "device")
+    _attributes = ("management", "enabled", "port_type", "port_status", "tagging")
+    _children = {}
+
+    name: str
+    device: str
+    management: bool
+    enabled: bool
+    port_type: str
+    port_status: str
+    tagging: bool
+
+    uuid: Optional[UUID] = None
+
+
+class Prefix(DiffSyncModel):
+    """DiffSync model for Meraki Prefixes."""
+
+    _modelname = "prefix"
+    _identifiers = ("prefix", "namespace")
+    _attributes = ("location", "tenant")
+    _children = {}
+
+    prefix: str
+    namespace: str
+    location: str
+    tenant: Optional[str] = None
+
+    uuid: Optional[UUID] = None
+
+
+class IPAddress(DiffSyncModel):
+    """DiffSync model for Meraki IP Addresses."""
+
+    _modelname = "ipaddress"
+    _identifiers = ("address", "prefix")
+    _attributes = ("tenant",)
+    _children = {}
+
+    address: str
+    prefix: str
+    tenant: Optional[str] = None
+
+    uuid: Optional[UUID] = None
+
+
+class IPAssignment(DiffSyncModel):
+    """DiffSync model for Meraki tracking IPAddress on particular Device interfaces."""
+
+    _modelname = "ipassignment"
+    _identifiers = ("address", "device", "namespace", "port")
+    _attributes = ("primary",)
+    _children = {}
+
+    address: str
+    namespace: str
+    device: str
+    port: str
+    primary: bool
+
+    uuid: Optional[UUID] = None
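In these base models, `_identifiers` form the unique key of each object, `_attributes` are the fields compared during diffing, and `_children` nest related models (ports under devices). A toy example of how those declarations drive the diff, using the same diffsync library the models above subclass:

```python
# Standalone sketch; Net is a hypothetical cut-down model, not one of the
# classes defined in this integration.
from typing import Optional
from diffsync import Adapter, DiffSyncModel

class Net(DiffSyncModel):
    _modelname = "net"
    _identifiers = ("name", "parent")
    _attributes = ("timezone",)

    name: str
    parent: Optional[str] = None
    timezone: Optional[str] = None

class Src(Adapter):
    net = Net
    top_level = ["net"]

class Dst(Adapter):
    net = Net
    top_level = ["net"]

src, dst = Src(), Dst()
src.add(Net(name="Branch1", parent="East", timezone="US/Eastern"))
dst.add(Net(name="Branch1", parent="East", timezone="UTC"))
# Same identifiers, different timezone attribute -> one "update" in the diff.
print(src.diff_to(dst).summary())
```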
diff --git a/nautobot_ssot/integrations/meraki/diffsync/models/meraki.py b/nautobot_ssot/integrations/meraki/diffsync/models/meraki.py
new file mode 100644
index 000000000..f9d788a2a
--- /dev/null
+++ b/nautobot_ssot/integrations/meraki/diffsync/models/meraki.py
@@ -0,0 +1,149 @@
+# pylint: disable=useless-parent-delegation
+"""Meraki-side DiffSync models for the Nautobot SSoT for Meraki integration."""
+
+from nautobot_ssot.integrations.meraki.diffsync.models.base import (
+    Device,
+    Hardware,
+    IPAddress,
+    IPAssignment,
+    Network,
+    OSVersion,
+    Port,
+    Prefix,
+)
+
+
+class MerakiNetwork(Network):
+    """Meraki implementation of Network DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Network in Meraki from MerakiNetwork object."""
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Network in Meraki from MerakiNetwork object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Network in Meraki from MerakiNetwork object."""
+        return self
+
+
+class MerakiHardware(Hardware):
+    """Meraki implementation of Hardware DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Hardware in Meraki from MerakiHardware object."""
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Hardware in Meraki from MerakiHardware object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Hardware in Meraki from MerakiHardware object."""
+        return self
+
+
+class MerakiOSVersion(OSVersion):
+    """Meraki implementation of OSVersion DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create OSVersion in Meraki from MerakiOSVersion object."""
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update OSVersion in Meraki from MerakiOSVersion object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete OSVersion in Meraki from MerakiOSVersion object."""
+        return self
+
+
+class MerakiDevice(Device):
+    """Meraki implementation of Device DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Device in Meraki from MerakiDevice object."""
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Device in Meraki from MerakiDevice object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Device in Meraki from MerakiDevice object."""
+        return self
+
+
+class MerakiPort(Port):
+    """Meraki implementation of Port DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Port in Meraki from MerakiPort object."""
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Port in Meraki from MerakiPort object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Port in Meraki from MerakiPort object."""
+        return self
+
+
+class MerakiPrefix(Prefix):
+    """Meraki implementation of Prefix DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Prefix in Meraki from MerakiPrefix object."""
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Prefix in Meraki from MerakiPrefix object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Prefix in Meraki from MerakiPrefix object."""
+        return self
+
+
+class MerakiIPAddress(IPAddress):
+    """Meraki implementation of IPAddress DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create IPAddress in Meraki from MerakiIPAddress object."""
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update IPAddress in Meraki from MerakiIPAddress object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete IPAddress in Meraki from MerakiIPAddress object."""
+        return self
+
+
+class MerakiIPAssignment(IPAssignment):
+    """Meraki implementation of IPAssignment DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create IPAddressToInterface in Meraki from MerakiIPAssignment object."""
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update IPAddressToInterface in Meraki from MerakiIPAssignment object."""
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete IPAddressToInterface in Meraki from MerakiIPAssignment object."""
+        return self
diff --git a/nautobot_ssot/integrations/meraki/diffsync/models/nautobot.py b/nautobot_ssot/integrations/meraki/diffsync/models/nautobot.py
new file mode 100644
index 000000000..5fc6c9769
--- /dev/null
+++ b/nautobot_ssot/integrations/meraki/diffsync/models/nautobot.py
@@ -0,0 +1,397 @@
+"""Nautobot DiffSync models for Meraki SSoT."""
+
+from datetime import datetime
+
+from nautobot.dcim.models import Device as NewDevice
+from nautobot.dcim.models import DeviceType, Interface, Location, SoftwareVersion
+from nautobot.extras.models import Note, Role
+from nautobot.ipam.models import IPAddress as OrmIPAddress
+from nautobot.ipam.models import IPAddressToInterface
+from nautobot.ipam.models import Prefix as OrmPrefix
+
+from nautobot_ssot.integrations.meraki.diffsync.models.base import (
+    Device,
+    Hardware,
+    IPAddress,
+    IPAssignment,
+    Network,
+    OSVersion,
+    Port,
+    Prefix,
+)
+
+
+class NautobotNetwork(Network):
+    """Nautobot implementation of Network DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Site in Nautobot from NautobotNetwork object."""
+        new_site = Location(
+            name=ids["name"],
+            location_type=adapter.job.network_loctype,
+            parent_id=adapter.region_map[ids["parent"]] if ids.get("parent") else None,
+            status_id=adapter.status_map["Active"],
+            time_zone=attrs["timezone"],
+        )
+        if attrs.get("notes"):
+            new_note = Note(
+                note=attrs["notes"],
+                user=adapter.job.user,
+                assigned_object_type_id=adapter.contenttype_map["location"],
+                assigned_object_id=new_site.id,
+            )
+            adapter.objects_to_create["notes"].append(new_note)
+        if attrs.get("tags"):
+            new_site.tags.set(attrs["tags"])
+            for tag in new_site.tags.all():
+                tag.content_types.add(adapter.contenttype_map["location"])
+        if attrs.get("tenant"):
+            new_site.tenant_id = adapter.tenant_map[attrs["tenant"]]
+        new_site.validated_save()
+        adapter.site_map[ids["name"]] = new_site
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Site in Nautobot from NautobotNetwork object."""
+        site = Location.objects.get(id=self.uuid)
+        if "timezone" in attrs:
+            site.time_zone = attrs["timezone"]
+        if attrs.get("notes"):
+            new_note = Note(
+                note=attrs["notes"],
+                user=self.adapter.job.user,
+                assigned_object_type_id=self.adapter.contenttype_map["location"],
+                assigned_object_id=site.id,
+            )
+            new_note.validated_save()
+        if "tags" in attrs:
+            site.tags.set(attrs["tags"])
+            for tag in site.tags.all():
+                tag.content_types.add(self.adapter.contenttype_map["location"])
+        if "tenant" in attrs:
+            if attrs.get("tenant"):
+                site.tenant_id = self.adapter.tenant_map[attrs["tenant"]]
+            else:
+                site.tenant = None
+        site.validated_save()
+        return super().update(attrs)
+
+
+class NautobotHardware(Hardware):
+    """Nautobot implementation of Hardware DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create DeviceType in Nautobot from NautobotHardware object."""
+        new_dt = DeviceType(model=ids["model"], manufacturer_id=adapter.manufacturer_map["Cisco Meraki"])
+        adapter.objects_to_create["devicetypes"].append(new_dt)
+        adapter.devicetype_map[ids["model"]] = new_dt.id
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def delete(self):
+        """Delete DeviceType in Nautobot from NautobotHardware object."""
+        super().delete()
+        devicetype = DeviceType.objects.get(id=self.uuid)
+        self.adapter.objects_to_delete["devicetypes"].append(devicetype)
+        return self
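Note that `NautobotHardware.create` records `new_dt.id` in `devicetype_map` before any database write. That works because Nautobot primary keys are client-side UUIDs assigned at instantiation, so the id is usable before the deferred `bulk_create` runs. A stdlib-only analogy of that pattern:

```python
# Analogy only; Row stands in for a Nautobot model whose pk defaults to uuid4.
import uuid
from dataclasses import dataclass, field

@dataclass
class Row:
    model: str
    id: uuid.UUID = field(default_factory=uuid.uuid4)

pending = []
dt = Row(model="MX84")   # id is already assigned here, before any "save"
pending.append(dt)
devicetype_map = {dt.model: dt.id}  # safe to build lookup maps pre-insert
print(devicetype_map)
```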
+
+
+class NautobotOSVersion(OSVersion):
+    """Nautobot implementation of OSVersion DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create SoftwareVersion in Nautobot from NautobotOSVersion object."""
+        new_ver = SoftwareVersion(
+            version=ids["version"],
+            status_id=adapter.status_map["Active"],
+            platform_id=adapter.platform_map["Cisco Meraki"],
+        )
+        new_ver.validated_save()
+        adapter.version_map[ids["version"]] = new_ver.id
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def delete(self):
+        """Delete SoftwareVersion in Nautobot from NautobotOSVersion object."""
+        super().delete()
+        osversion = SoftwareVersion.objects.get(id=self.uuid)
+        osversion.delete()
+        return self
+
+
+class NautobotDevice(Device):
+    """Nautobot implementation of Meraki Device model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Device in Nautobot from NautobotDevice object."""
+        dev_role, created = Role.objects.get_or_create(name=attrs["role"])
+        if created:
+            dev_role.content_types.add(adapter.contenttype_map["device"])
+            adapter.devicerole_map[attrs["role"]] = dev_role.id
+        new_device = NewDevice(
+            name=ids["name"],
+            platform_id=adapter.platform_map["Cisco Meraki"],
+            serial=attrs["serial"],
+            status_id=adapter.status_map[attrs["status"]],
+            role_id=adapter.devicerole_map[attrs["role"]],
+            device_type_id=adapter.devicetype_map[attrs["model"]],
+            location=adapter.site_map[attrs["network"]],
+            controller_managed_device_group=adapter.job.instance.controller_managed_device_groups.first(),
+        )
+        if attrs.get("notes"):
+            new_note = Note(
+                note=attrs["notes"],
+                user=adapter.job.user,
+                assigned_object_type_id=adapter.contenttype_map["device"],
+                assigned_object_id=new_device.id,
+            )
+            adapter.objects_to_create["notes"].append(new_note)
+        if attrs.get("tags"):
+            new_device.tags.set(attrs["tags"])
+            for tag in new_device.tags.all():
+                tag.content_types.add(adapter.contenttype_map["device"])
+        if "tenant" in attrs:
+            if attrs.get("tenant"):
+                new_device.tenant_id = adapter.tenant_map[attrs["tenant"]]
+            else:
+                new_device.tenant = None
+        if attrs.get("version"):
+            new_device.software_version_id = adapter.version_map[attrs["version"]]
+        new_device.cf["system_of_record"] = "Meraki SSoT"
+        new_device.cf["last_synced_from_sor"] = datetime.today().date().isoformat()
+        adapter.objects_to_create["devices"].append(new_device)
+        adapter.device_map[new_device.name] = new_device.id
+        adapter.port_map[new_device.name] = {}
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):  # pylint: disable=too-many-branches
+        """Update Device in Nautobot from NautobotDevice object."""
+        device = NewDevice.objects.get(id=self.uuid)
+        if "controller_group" in attrs:
+            device.controller_managed_device_group = self.adapter.job.instance.controller_managed_device_groups.first()
+        if "serial" in attrs:
+            device.serial = attrs["serial"]
+        if "status" in attrs:
+            device.status_id = self.adapter.status_map[attrs["status"]]
+        if "role" in attrs:
+            device.role_id = self.adapter.devicerole_map[attrs["role"]]
+        if "model" in attrs:
+            device.device_type_id = self.adapter.devicetype_map[attrs["model"]]
+        if "network" in attrs:
+            device.location = self.adapter.site_map[attrs["network"]]
+        if attrs.get("notes"):
+            new_note = Note(
+                note=attrs["notes"],
+                user=self.adapter.job.user,
+                assigned_object_type_id=self.adapter.contenttype_map["device"],
+                assigned_object_id=device.id,
+            )
+            new_note.validated_save()
+        if "tags" in attrs:
+            device.tags.set(attrs["tags"])
+            for tag in device.tags.all():
+                tag.content_types.add(self.adapter.contenttype_map["device"])
+        if "tenant" in attrs:
+            if attrs.get("tenant"):
+                device.tenant_id = self.adapter.tenant_map[attrs["tenant"]]
+            else:
+                device.tenant = None
+        if "version" in attrs:
+            device.software_version_id = self.adapter.version_map[attrs["version"]]
+        device.cf["system_of_record"] = "Meraki SSoT"
+        device.cf["last_synced_from_sor"] = datetime.today().date().isoformat()
+        device.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Device in Nautobot from NautobotDevice object."""
+        dev = NewDevice.objects.get(id=self.uuid)
+        super().delete()
+        self.adapter.objects_to_delete["devices"].append(dev)
+        return self
+
+
+class NautobotPort(Port):
+    """Nautobot implementation of Meraki Port model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Interface in Nautobot from NautobotPort object."""
+        new_port = Interface(
+            name=ids["name"],
+            device_id=adapter.device_map[ids["device"]],
+            enabled=attrs["enabled"],
+            mode="access" if not attrs["tagging"] else "tagged",
+            mgmt_only=attrs["management"],
+            type=attrs["port_type"],
+            status_id=adapter.status_map[attrs["port_status"]],
+        )
+        new_port.custom_field_data["system_of_record"] = "Meraki SSoT"
+        new_port.custom_field_data["last_synced_from_sor"] = datetime.today().date().isoformat()
+        adapter.objects_to_create["ports"].append(new_port)
+        adapter.port_map[ids["device"]][ids["name"]] = new_port.id
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Interface in Nautobot from NautobotPort object."""
+        port = Interface.objects.get(id=self.uuid)
+        if "enabled" in attrs:
+            port.enabled = attrs["enabled"]
+        if "tagging" in attrs:
+            port.mode = "access" if not attrs["tagging"] else "tagged"
+        if "management" in attrs:
+            port.mgmt_only = attrs["management"]
+        if "port_type" in attrs:
+            port.type = attrs["port_type"]
+        if "port_status" in attrs:
+            port.status_id = self.adapter.status_map[attrs["port_status"]]
+        port.custom_field_data["system_of_record"] = "Meraki SSoT"
+        port.custom_field_data["last_synced_from_sor"] = datetime.today().date().isoformat()
+        port.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Interface in Nautobot from NautobotPort object."""
+        port = Interface.objects.get(id=self.uuid)
+        super().delete()
+        self.adapter.objects_to_delete["ports"].append(port)
+        return self
+
+
+class NautobotPrefix(Prefix):
+    """Nautobot implementation of Prefix DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create Prefix in Nautobot from NautobotPrefix object."""
+        new_pf = OrmPrefix(
+            prefix=ids["prefix"],
+            namespace_id=adapter.namespace_map[ids["namespace"]],
+            status_id=adapter.status_map["Active"],
+            tenant_id=adapter.tenant_map[attrs["tenant"]] if attrs.get("tenant") else None,
+        )
+        if attrs.get("location"):
+            adapter.objects_to_create["prefix_locs"].append((new_pf.id, adapter.site_map[attrs["location"]]))
+        new_pf.custom_field_data["system_of_record"] = "Meraki SSoT"
+        new_pf.custom_field_data["last_synced_from_sor"] = datetime.today().date().isoformat()
+        adapter.objects_to_create["prefixes"].append(new_pf)
+        adapter.prefix_map[ids["prefix"]] = new_pf.id
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update Prefix in Nautobot from NautobotPrefix object."""
+        prefix = OrmPrefix.objects.get(id=self.uuid)
+        if "location" in attrs:
+            if attrs.get("location"):
+                prefix.locations.add(self.adapter.site_map[attrs["location"]])
+            else:
+                prefix.locations.remove(self.adapter.site_map[self.location])
+        if "tenant" in attrs:
+            if attrs.get("tenant"):
+                prefix.tenant_id = self.adapter.tenant_map[attrs["tenant"]]
+            else:
+                prefix.tenant = None
+        prefix.custom_field_data["system_of_record"] = "Meraki SSoT"
+        prefix.custom_field_data["last_synced_from_sor"] = datetime.today().date().isoformat()
+        prefix.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete Prefix in Nautobot from NautobotPrefix object."""
+        del_pf = OrmPrefix.objects.get(id=self.uuid)
+        super().delete()
+        self.adapter.objects_to_delete["prefixes"].append(del_pf)
+        return self
+
+
+class NautobotIPAddress(IPAddress):
+    """Nautobot implementation of IPAddress DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create IPAddress in Nautobot from NautobotIPAddress object."""
+        namespace = attrs["tenant"] if attrs.get("tenant") else "Global"
+        new_ip = OrmIPAddress(
+            address=ids["address"],
+            namespace=adapter.namespace_map[namespace],
+            status_id=adapter.status_map["Active"],
+            tenant_id=adapter.tenant_map[attrs["tenant"]] if attrs.get("tenant") else None,
+        )
+        adapter.objects_to_create["ipaddrs-to-prefixes"].append((new_ip, adapter.prefix_map[ids["prefix"]]))
+        new_ip.cf["system_of_record"] = "Meraki SSoT"
+        new_ip.cf["last_synced_from_sor"] = datetime.today().date().isoformat()
+        adapter.objects_to_create["ipaddrs"].append(new_ip)
+        if namespace not in adapter.ipaddr_map:
+            adapter.ipaddr_map[namespace] = {}
+        adapter.ipaddr_map[namespace][ids["address"]] = new_ip.id
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update IPAddress in Nautobot from NautobotIPAddress object."""
+        ipaddr = OrmIPAddress.objects.get(id=self.uuid)
+        if "tenant" in attrs:
+            if attrs.get("tenant"):
+                ipaddr.tenant_id = self.adapter.tenant_map[attrs["tenant"]]
+            else:
+                ipaddr.tenant = None
+        ipaddr.cf["system_of_record"] = "Meraki SSoT"
+        ipaddr.cf["last_synced_from_sor"] = datetime.today().date().isoformat()
+        ipaddr.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete IPAddress in Nautobot from NautobotIPAddress object."""
+        ipaddr = OrmIPAddress.objects.get(id=self.uuid)
+        super().delete()
+        self.adapter.objects_to_delete["ipaddrs"].append(ipaddr)
+        return self
+
+
+class NautobotIPAssignment(IPAssignment):
+    """Nautobot implementation of IPAssignment DiffSync model."""
+
+    @classmethod
+    def create(cls, adapter, ids, attrs):
+        """Create IPAddressToInterface in Nautobot from NautobotIPAssignment object."""
+        new_map = IPAddressToInterface(
+            ip_address_id=adapter.ipaddr_map[ids["namespace"]][ids["address"]],
+            interface_id=adapter.port_map[ids["device"]][ids["port"]],
+        )
+        adapter.objects_to_create["ipaddrs-to-intfs"].append(new_map)
+        if attrs.get("primary"):
+            if ":" in ids["address"]:
+                adapter.objects_to_create["device_primary_ip6"].append(
+                    (adapter.device_map[ids["device"]], adapter.ipaddr_map[ids["namespace"]][ids["address"]])
+                )
+            else:
+                adapter.objects_to_create["device_primary_ip4"].append(
+                    (adapter.device_map[ids["device"]], adapter.ipaddr_map[ids["namespace"]][ids["address"]])
+                )
+        return super().create(adapter=adapter, ids=ids, attrs=attrs)
+
+    def update(self, attrs):
+        """Update IP Address in Nautobot from NautobotIPAssignment object."""
+        mapping = IPAddressToInterface.objects.get(id=self.uuid)
+        if attrs.get("primary"):
+            if mapping.ip_address.ip_version == 4:
+                self.adapter.objects_to_create["device_primary_ip4"].append(
+                    (mapping.interface.device.id, mapping.ip_address.id)
+                )
+            else:
+                self.adapter.objects_to_create["device_primary_ip6"].append(
+                    (mapping.interface.device.id, mapping.ip_address.id)
+                )
+            mapping.validated_save()
+        return super().update(attrs)
+
+    def delete(self):
+        """Delete IPAddressToInterface in Nautobot from NautobotIPAssignment object."""
+        mapping = IPAddressToInterface.objects.get(id=self.uuid)
+        super().delete()
+        self.adapter.job.logger.info(
+            f"Deleting IPAddress to Interface mapping between {self.address} and {self.device}'s {self.port} port."
+        )
+        mapping.delete()
+        return self
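`NautobotIPAssignment.create` buckets primary-IP updates by address family with a plain `":" in address` check, deferring the actual `primary_ip4`/`primary_ip6` writes until after the assignments are bulk-created. An equivalent, slightly more explicit form of that check using the stdlib:

```python
# Equivalent routing of (device, ip) pairs into the two deferred buckets.
import ipaddress

def primary_bucket(address: str) -> str:
    ip = ipaddress.ip_interface(address)
    return "device_primary_ip6" if ip.version == 6 else "device_primary_ip4"

print(primary_bucket("192.0.2.10/24"))   # device_primary_ip4
print(primary_bucket("2001:db8::1/64"))  # device_primary_ip6
```

The two-phase ordering matters because a device's primary IP must already be assigned to one of its interfaces before the pointer can be set.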
diff --git a/nautobot_ssot/integrations/meraki/jobs.py b/nautobot_ssot/integrations/meraki/jobs.py
new file mode 100644
index 000000000..66d58735b
--- /dev/null
+++ b/nautobot_ssot/integrations/meraki/jobs.py
@@ -0,0 +1,169 @@
+"""Jobs for Meraki SSoT integration."""
+
+from ast import literal_eval
+
+from diffsync.enum import DiffSyncFlags
+from django.urls import reverse
+from nautobot.core.celery import register_jobs
+from nautobot.dcim.models import Controller, Location, LocationType
+from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices
+from nautobot.extras.jobs import BooleanVar, JSONVar, ObjectVar, StringVar
+from nautobot.tenancy.models import Tenant
+
+from nautobot_ssot.exceptions import JobException
+from nautobot_ssot.integrations.meraki.diffsync.adapters import meraki, nautobot
+from nautobot_ssot.integrations.meraki.utils.meraki import DashboardClient
+from nautobot_ssot.jobs.base import DataMapping, DataSource
+from nautobot_ssot.utils import verify_controller_managed_device_group
+
+name = "Meraki SSoT"  # pylint: disable=invalid-name
+
+
+class MerakiDataSource(DataSource):  # pylint: disable=too-many-instance-attributes
+    """Meraki SSoT Data Source."""
+
+    instance = ObjectVar(
+        model=Controller,
+        queryset=Controller.objects.all(),
+        description="Controller with ExternalIntegration containing information for connecting to Meraki dashboard.",
+        display_field="display",
+        label="Meraki Controller",
+        required=True,
+    )
+    network_loctype = ObjectVar(
+        model=LocationType,
+        queryset=LocationType.objects.all(),
+        description="LocationType to use for imported Networks.",
+        display_field="display",
+        label="Network LocationType",
+        required=True,
+    )
+    parent_location = ObjectVar(
+        model=Location,
+        queryset=Location.objects.all(),
+        query_params={"location_type": "$network_loctype.parent"},
+        description="Default parent Location to assign to imported Networks.",
+        display_field="display",
+        label="Parent Location",
+        required=False,
+    )
+    location_map = JSONVar(
+        label="Location Mapping",
+        required=False,
+        default={},
+        description="Map of information regarding Networks in Meraki and their parent Location(s).",
+    )
+    hostname_mapping = StringVar(
+        label="Hostname Mapping",
+        required=False,
+        default=[],
+        description="List of tuples containing Device hostnames to assign to specified Role. ex: [('core-router.com', 'router')]",
+    )
+    devicetype_mapping = StringVar(
+        label="DeviceType Mapping",
+        required=False,
+        default=[],
+        description="List of tuples containing DeviceTypes to assign to a specified Role. ex: [('MX', 'Firewall')]",
+    )
+    debug = BooleanVar(description="Enable for more verbose debug logging", default=False)
+    tenant = ObjectVar(model=Tenant, label="Tenant", required=False)
+
+    def __init__(self):
+        """Initialize job objects."""
+        super().__init__()
+        self.data = None
+        self.diffsync_flags = DiffSyncFlags.CONTINUE_ON_FAILURE
+
+    class Meta:  # pylint: disable=too-few-public-methods
+        """Meta data for Meraki."""
+
+        name = "Meraki => Nautobot"
+        data_source = "Meraki"
+        data_target = "Nautobot"
+        description = "Sync information from Meraki to Nautobot"
+        field_order = [
+            "dryrun",
+            "debug",
+            "instance",
+            "network_loctype",
+            "parent_location",
+            "location_map",
+            "hostname_mapping",
+            "devicetype_mapping",
+            "tenant",
+        ]
+
+    @classmethod
+    def config_information(cls):
+        """Dictionary describing the configuration of this DataSource."""
+        return {}
+
+    @classmethod
+    def data_mappings(cls):
+        """List describing the data mappings involved in this DataSource."""
+        return (
+            DataMapping("Networks", None, "Locations", reverse("dcim:location_list")),
+            DataMapping("Devices", None, "Devices", reverse("dcim:device_list")),
+            DataMapping("Ports", None, "Interfaces", reverse("dcim:interface_list")),
+            DataMapping("Prefixes", None, "Prefixes", reverse("ipam:prefix_list")),
+            DataMapping("IP Addresses", None, "IP Addresses", reverse("ipam:ipaddress_list")),
+        )
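The `location_map` JSONVar has no schema in the form itself; its presumed shape is inferred from how the Meraki adapter reads it (`location_map[<network name>]["parent"]`). Illustrative values only:

```python
# Hypothetical Job form input for Location Mapping.
location_map = {
    "Branch1": {"parent": "East Region"},
    "Branch2": {"parent": "West Region"},
}

net_name = "Branch1"
parent_name = location_map[net_name]["parent"] if net_name in location_map else None
print(parent_name)  # East Region
```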
+
+    def validate_settings(self):
+        """Confirm the settings in the Job form are valid."""
+        if self.network_loctype.parent and (
+            not self.parent_location
+            and (not self.location_map or not all("parent" in value for value in self.location_map.values()))
+        ):
+            network_loctype = self.network_loctype.name
+            self.logger.error(
+                f"{network_loctype} requires a parent Location when creating {network_loctype} Locations, but neither the Parent Location nor the Location Mapping field is defined."
+            )
+            raise JobException(message="Parent Location is required but undefined in Job form.")
+
+    def load_source_adapter(self):
+        """Load data from Meraki into DiffSync models."""
+        verify_controller_managed_device_group(controller=self.instance)
+        self.validate_settings()
+        _sg = self.instance.external_integration.secrets_group
+        org_id = _sg.get_secret_value(
+            access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP,
+            secret_type=SecretsGroupSecretTypeChoices.TYPE_USERNAME,
+        )
+        token = _sg.get_secret_value(
+            access_type=SecretsGroupAccessTypeChoices.TYPE_HTTP,
+            secret_type=SecretsGroupSecretTypeChoices.TYPE_TOKEN,
+        )
+        client = DashboardClient(logger=self, org_id=org_id, token=token)
+        self.source_adapter = meraki.MerakiAdapter(job=self, sync=self.sync, client=client, tenant=self.tenant)
+        self.source_adapter.load()
+
+    def load_target_adapter(self):
+        """Load data from Nautobot into DiffSync models."""
+        self.target_adapter = nautobot.NautobotAdapter(job=self, sync=self.sync, tenant=self.tenant)
+        self.target_adapter.load()
+
+    def run(
+        self,
+        dryrun,
+        memory_profiling,
+        debug,
+        *args,
+        **kwargs,
+    ):  # pylint: disable=arguments-differ
+        """Perform data synchronization."""
+        self.dryrun = dryrun
+        self.memory_profiling = memory_profiling
+        self.instance = kwargs["instance"]
+        self.network_loctype = kwargs["network_loctype"]
+        self.parent_location = kwargs["parent_location"]
+        self.location_map = kwargs["location_map"]
+        self.debug = debug
+        self.tenant = kwargs["tenant"]
+        self.hostname_mapping = literal_eval(kwargs["hostname_mapping"])
+        self.devicetype_mapping = literal_eval(kwargs["devicetype_mapping"])
+        super().run(dryrun=self.dryrun, memory_profiling=self.memory_profiling, *args, **kwargs)
+
+
+jobs = [MerakiDataSource]
+register_jobs(*jobs)
diff --git a/nautobot_ssot/integrations/meraki/signals.py b/nautobot_ssot/integrations/meraki/signals.py
new file mode 100644
index 000000000..6285fd844
--- /dev/null
+++ b/nautobot_ssot/integrations/meraki/signals.py
@@ -0,0 +1,53 @@
+"""Signals triggered when Nautobot starts to perform certain actions."""
+
+from nautobot.core.signals import nautobot_database_ready
+from nautobot.extras.choices import CustomFieldTypeChoices
+
+
+def register_signals(sender):
+    """Register signals for Meraki integration."""
+    nautobot_database_ready.connect(nautobot_database_ready_callback, sender=sender)
+
+
+def nautobot_database_ready_callback(sender, *, apps, **kwargs):  # pylint: disable=unused-argument
+    """Adds System of Record and Last Synced CustomFields to Device, Interface, Prefix, and IPAddress and ensures the Cisco Meraki Manufacturer and Platform exist.
+
+    Callback function triggered by the nautobot_database_ready signal when the Nautobot database is fully ready.
+    """
+    # pylint: disable=invalid-name, too-many-locals
+    ContentType = apps.get_model("contenttypes", "ContentType")
+    CustomField = apps.get_model("extras", "CustomField")
+    Device = apps.get_model("dcim", "Device")
+    Interface = apps.get_model("dcim", "Interface")
+    Prefix = apps.get_model("ipam", "Prefix")
+    IPAddress = apps.get_model("ipam", "IPAddress")
+    Manufacturer = apps.get_model("dcim", "Manufacturer")
+    Platform = apps.get_model("dcim", "Platform")
+
+    cisco_manu = Manufacturer.objects.get_or_create(name="Cisco Meraki")[0]
+    plat_dict = {
+        "name": "Cisco Meraki",
+        "manufacturer": cisco_manu,
+        "network_driver": "cisco_meraki",
+    }
+    Platform.objects.update_or_create(name__icontains="Meraki", defaults=plat_dict)
+
+    sysrecord_cf_dict = {
+        "type": CustomFieldTypeChoices.TYPE_TEXT,
+        "key": "system_of_record",
+        "label": "System of Record",
+    }
+    sysrecord_custom_field, _ = CustomField.objects.update_or_create(
+        key=sysrecord_cf_dict["key"], defaults=sysrecord_cf_dict
+    )
+    last_sync_cf_dict = {
+        "type": CustomFieldTypeChoices.TYPE_DATE,
+        "key": "last_synced_from_sor",
+        "label": "Last sync from System of Record",
+    }
+    last_sync_custom_field, _ = CustomField.objects.update_or_create(
+        key=last_sync_cf_dict["key"], defaults=last_sync_cf_dict
+    )
+    for model in [Device, Interface, Prefix, IPAddress]:
+        sysrecord_custom_field.content_types.add(ContentType.objects.get_for_model(model))
+        last_sync_custom_field.content_types.add(ContentType.objects.get_for_model(model))
diff --git a/nautobot_ssot/integrations/meraki/utils/__init__.py b/nautobot_ssot/integrations/meraki/utils/__init__.py
new file mode 100644
index 000000000..a2ea3b2fe
--- /dev/null
+++ b/nautobot_ssot/integrations/meraki/utils/__init__.py
@@ -0,0 +1 @@
+"""Utility functions for working with Meraki and Nautobot."""
diff --git a/nautobot_ssot/integrations/meraki/utils/meraki.py b/nautobot_ssot/integrations/meraki/utils/meraki.py
new file mode 100644
index 000000000..cfd58ede1
--- /dev/null
+++ b/nautobot_ssot/integrations/meraki/utils/meraki.py
@@ -0,0 +1,240 @@
+"""Utility functions for working with Meraki."""
+
+import re
+
+import meraki
+
+
+class DashboardClient:
+    """Client for interacting with Meraki dashboard."""
+
+    def __init__(self, logger, org_id: str, token: str, *args, **kwargs):
+        """Initialize Meraki dashboard client."""
+        self.logger = logger
+        self.org_id = org_id
+        self.token = token
+        self.conn = self.connect_dashboard()
+        self.network_map = {}
+
+    def connect_dashboard(self) -> meraki.DashboardAPI:  # pylint: disable=inconsistent-return-statements
+        """Connect to Meraki dashboard and return connection object.
+
+        Raises:
+            err: APIError if issue with connecting to Meraki dashboard.
+
+        Returns:
+            meraki.DashboardAPI: Connection to Meraki dashboard.
+        """
+        try:
+            dashboard = meraki.DashboardAPI(
+                api_key=self.token,
+                base_url="https://api.meraki.com/api/v1/",
+                output_log=False,
+                print_console=False,
+            )
+            return dashboard
+        except meraki.APIError as err:
+            self.logger.logger.error(f"Unable to connect to Meraki dashboard: {err.message}")
+            raise err
+
+    def validate_organization_exists(self) -> bool:
+        """Confirm defined organization ID is seen in Dashboard to confirm we have access.
+
+        Returns:
+            bool: Whether Organization ID was found in Dashboard.
+        """
+        orgs = self.conn.organizations.getOrganizations()
+        ids = [org["id"] for org in orgs]
+        if self.org_id in ids:
+            return True
+        return False
+
+    def get_org_networks(self) -> list:
+        """Retrieve all networks for specified Organization ID.
+
+        Returns:
+            list: List of found networks. Empty list if error retrieving networks.
+        """
+        networks = []
+        try:
+            networks = self.conn.organizations.getOrganizationNetworks(organizationId=self.org_id)
+            self.network_map = {net["id"]: net for net in networks}
+        except meraki.APIError as err:
+            self.logger.logger.warning(
+                f"Meraki API error: {err}\nstatus code = {err.status}\nreason = {err.reason}\nerror = {err.message}"
+            )
+        return networks
+
+    def get_org_devices(self) -> list:
+        """Retrieve all devices for specified Organization ID.
+
+        Returns:
+            list: List of found devices. Empty list if error retrieving devices.
+        """
+        devices = []
+        try:
+            devices = self.conn.organizations.getOrganizationDevices(organizationId=self.org_id)
+        except meraki.APIError as err:
+            self.logger.logger.warning(
+                f"Meraki API error: {err}\nstatus code = {err.status}\nreason = {err.reason}\nerror = {err.message}"
+            )
+        return devices
+
+    def get_org_uplink_statuses(self) -> dict:
+        """Retrieve appliance uplink statuses for MX, MG, and Z devices for specified Organization ID.
+
+        Returns:
+            dict: Map of Device serial to uplink statuses for those MX, MG, and Z devices in specified organization ID.
+        """
+        settings_map = {}
+        try:
+            result = self.conn.organizations.getOrganizationUplinksStatuses(organizationId=self.org_id)
+            settings_map = {net["serial"]: net for net in result}
+        except meraki.APIError as err:
+            self.logger.logger.warning(
+                f"Meraki API error: {err}\nstatus code = {err.status}\nreason = {err.reason}\nerror = {err.message}"
+            )
+        return settings_map
+
+    def get_org_switchports(self) -> dict:
+        """Retrieve all ports for switches in specified organization ID.
+
+        Returns:
+            dict: Map of Device serial to switchport information for specified organization ID.
+        """
+        port_map = {}
+        try:
+            result = self.conn.switch.getOrganizationSwitchPortsBySwitch(organizationId=self.org_id)
+            port_map = {switch["serial"]: switch for switch in result}
+        except meraki.APIError as err:
+            self.logger.logger.warning(
+                f"Meraki API error: {err}\nstatus code = {err.status}\nreason = {err.reason}\nerror = {err.message}"
+            )
+        return port_map
+
+    def get_org_device_statuses(self) -> dict:
+        """Retrieve device statuses from Meraki dashboard.
+
+        Returns:
+            dict: Dictionary of Device name with its status as value.
+        """
+        statuses = {}
+        try:
+            response = self.conn.organizations.getOrganizationDevicesStatuses(organizationId=self.org_id)
+            statuses = {dev["name"]: dev["status"] for dev in response}
+        except meraki.APIError as err:
+            self.logger.logger.warning(
+                f"Meraki API error: {err}\nstatus code = {err.status}\nreason = {err.reason}\nerror = {err.message}"
+            )
+        return statuses
+
+    def get_management_ports(self, serial: str) -> dict:
+        """Retrieve device management ports from Meraki dashboard.
+
+        Args:
+            serial (str): Serial of device to retrieve management ports for.
+
+        Returns:
+            dict: Dictionary of management ports and associated information.
+        """
+        ports = {}
+        try:
+            ports = self.conn.devices.getDeviceManagementInterface(serial=serial)
+            if ports.get("ddnsHostnames"):
+                ports.pop("ddnsHostnames")
+        except meraki.APIError as err:
+            self.logger.logger.warning(
+                f"Meraki API error: {err}\nstatus code = {err.status}\nreason = {err.reason}\nerror = {err.message}"
+            )
+        return ports
+
+    def get_uplink_settings(self, serial: str) -> dict:
+        """Retrieve settings for uplink ports from Meraki dashboard.
+
+        Args:
+            serial (str): Serial of device to retrieve uplink settings for.
+
+        Returns:
+            dict: Dictionary of uplink settings for device with specified serial.
+        """
+        ports = {}
+        try:
+            ports = self.conn.appliance.getDeviceApplianceUplinksSettings(serial=serial)
+            ports = ports["interfaces"]
+        except meraki.APIError as err:
+            self.logger.logger.warning(
+                f"Meraki API error: {err}\nstatus code = {err.status}\nreason = {err.reason}\nerror = {err.message}"
+            )
+        return ports
+
+    def get_switchport_statuses(self, serial: str) -> dict:
+        """Retrieve statuses for all switchports on specified MS Device.
+
+        Args:
+            serial (str): Serial of MS device in question.
+
+        Returns:
+            dict: Map of switch ports and associated information.
+        """
+        port_statuses = {}
+        try:
+            result = self.conn.switch.getDeviceSwitchPortsStatuses(serial=serial)
+            port_statuses = {port["portId"]: port for port in result}
+        except meraki.APIError as err:
+            self.logger.logger.warning(
+                f"Meraki API error: {err}\nstatus code = {err.status}\nreason = {err.reason}\nerror = {err.message}"
+            )
+        return port_statuses
+
+    def get_appliance_switchports(self, network_id: str) -> list:
+        """Retrieve switchports for MX devices in specified network ID.
+
+        Args:
+            network_id (str): Network ID that MX device belongs to.
+
+        Returns:
+            list: List of switchports for network that MX device belongs to.
+        """
+        ports = []
+        try:
+            ports = self.conn.appliance.getNetworkAppliancePorts(networkId=network_id)
+        except meraki.APIError as err:
+            self.logger.logger.warning(
+                f"Meraki API error: {err}\nstatus code = {err.status}\nreason = {err.reason}\nerror = {err.message}"
+            )
+        return ports
+
+
+def parse_hostname_for_role(dev_hostname: str, hostname_map: list) -> str:
+    """Parse device hostname to get Device Role.
+
+    Args:
+        dev_hostname (str): Hostname of Device to determine role of.
+        hostname_map (list): List of tuples mapping hostname regex patterns to their Role.
+
+    Returns:
+        str: Name of DeviceRole. Defaults to Unknown.
+    """
+    dev_role = "Unknown"
+    for entry in hostname_map:
+        match = re.match(pattern=entry[0], string=dev_hostname)
+        if match:
+            dev_role = entry[1]
+    return dev_role
+
+
+def get_role_from_devicetype(dev_model: str, devicetype_map: list) -> str:
+    """Get Device Role using DeviceType from devicetype_mapping Setting.
+
+    Args:
+        dev_model (str): Hardware model of Device to determine role of.
+        devicetype_map (list): List of tuples mapping DeviceType model substrings to their Role.
+
+    Returns:
+        str: Name of DeviceRole. Defaults to Unknown.
+    """
+    dev_role = "Unknown"
+    for entry in devicetype_map:
+        if entry[0] in dev_model:
+            dev_role = entry[1]
+    return dev_role
+ """ + _strings = list(list_tags.names()) + if len(_strings) > 1: + _strings.sort() + return _strings diff --git a/nautobot_ssot/jobs/__init__.py b/nautobot_ssot/jobs/__init__.py index 2d7ae96fd..8da1b6b46 100644 --- a/nautobot_ssot/jobs/__init__.py +++ b/nautobot_ssot/jobs/__init__.py @@ -9,6 +9,7 @@ from nautobot.core.settings_funcs import is_truthy from nautobot.extras.models import Job +from nautobot_ssot.exceptions import JobException from nautobot_ssot.integrations.utils import each_enabled_integration, each_enabled_integration_module from nautobot_ssot.jobs.base import DataSource, DataTarget from nautobot_ssot.jobs.examples import ExampleDataSource, ExampleDataTarget @@ -18,6 +19,7 @@ _MIN_NAUTOBOT_VERSION = { "nautobot_ssot_aci": "2.2", "nautobot_ssot_dna_center": "2.2", + "nautobot_ssot_meraki": "2.2", } @@ -28,15 +30,6 @@ jobs = [ExampleDataSource, ExampleDataTarget] -class JobException(Exception): - """Exception raised when failure loading integration Job.""" - - def __init__(self, message): - """Populate exception information.""" - self.message = message - super().__init__(self.message) - - def _check_min_nautobot_version_met(): incompatible_apps_msg = [] nautobot_version = metadata.version("nautobot") diff --git a/nautobot_ssot/jobs/base.py b/nautobot_ssot/jobs/base.py index 9e087baed..972f8952d 100644 --- a/nautobot_ssot/jobs/base.py +++ b/nautobot_ssot/jobs/base.py @@ -45,7 +45,10 @@ class DataSyncBaseJob(Job): # pylint: disable=too-many-instance-attributes - `data_source_icon` and `data_target_icon` """ - dryrun = DryRunVar(description="Perform a dry-run, making no actual changes to Nautobot data.", default=True) + dryrun = DryRunVar( + description="Perform a dry-run, making no actual changes to Nautobot data.", + default=True, + ) memory_profiling = BooleanVar(description="Perform a memory profiling analysis.", default=False) def load_source_adapter(self): @@ -96,7 +99,7 @@ def execute_sync(self): else: self.logger.warning("Not both adapters were properly initialized prior to synchronization.") - def sync_data(self, memory_profiling): + def sync_data(self, memory_profiling): # pylint: disable=too-many-statements """Method to load data from adapters, calculate diffs and sync (if not dry-run). 
         It is composed by 4 methods:
@@ -117,10 +120,16 @@ def format_size(size):  # pylint: disable=inconsistent-return-statements
             for unit in ("B", "KiB", "MiB", "GiB", "TiB"):
                 if abs(size) < 100 and unit != "B":
                     # 3 digits (xx.x UNIT)
-                    return "%.1f %s" % (size, unit)  # pylint: disable=consider-using-f-string
+                    return "%.1f %s" % (  # pylint: disable=consider-using-f-string
+                        size,
+                        unit,
+                    )
                 if abs(size) < 10 * 1024 or unit == "TiB":
                     # 4 or 5 digits (xxxx UNIT)
-                    return "%.0f %s" % (size, unit)  # pylint: disable=consider-using-f-string
+                    return "%.0f %s" % (  # pylint: disable=consider-using-f-string
+                        size,
+                        unit,
+                    )
                 size /= 1024
 
         def record_memory_trace(step: str):
@@ -150,7 +159,11 @@ def record_memory_trace(step: str):
         load_source_adapter_time = datetime.now()
         self.sync.source_load_time = load_source_adapter_time - start_time
         self.sync.save()
-        self.logger.info("Source Load Time from %s: %s", self.source_adapter, self.sync.source_load_time)
+        self.logger.info(
+            "Source Load Time from %s: %s",
+            self.source_adapter,
+            self.sync.source_load_time,
+        )
         if memory_profiling:
             record_memory_trace("source_load")
 
@@ -159,7 +172,11 @@ def record_memory_trace(step: str):
         load_target_adapter_time = datetime.now()
         self.sync.target_load_time = load_target_adapter_time - load_source_adapter_time
         self.sync.save()
-        self.logger.info("Target Load Time from %s: %s", self.target_adapter, self.sync.target_load_time)
+        self.logger.info(
+            "Target Load Time from %s: %s",
+            self.target_adapter,
+            self.sync.target_load_time,
+        )
         if memory_profiling:
             record_memory_trace("target_load")
 
@@ -172,10 +189,10 @@ def record_memory_trace(step: str):
         if memory_profiling:
             record_memory_trace("diff")
 
-        if self.dryrun:
+        if self.sync.dry_run:
             self.logger.info("As `dryrun` is set, skipping the actual data sync.")
         else:
             self.logger.info("Syncing from %s to %s...", self.source_adapter, self.target_adapter)
             self.execute_sync()
         execute_sync_time = datetime.now()
         self.sync.sync_time = execute_sync_time - calculate_diff_time
@@ -185,7 +202,11 @@ def record_memory_trace(step: str):
         if memory_profiling:
             record_memory_trace("sync")
 
-    def lookup_object(self, model_name, unique_id) -> Optional[BaseModel]:  # pylint: disable=unused-argument
+    def lookup_object(  # pylint: disable=unused-argument
+        self,
+        model_name,
+        unique_id,
+    ) -> Optional[BaseModel]:
         """Look up the Nautobot record, if any, identified by the args.
 
         Optional helper method used to build more detailed/accurate SyncLogEntry records from DiffSync logs.
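
Subclasses can override `lookup_object` to resolve DiffSync log entries back to Nautobot records; a minimal sketch of an override (the model name, uniqueness scheme, and Location import are assumed for illustration only):

    def lookup_object(self, model_name, unique_id):
        if model_name == "location":
            return Location.objects.filter(name=unique_id).first()
        return None
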
@@ -321,7 +343,10 @@ def run(self, dryrun, memory_profiling, *args, **kwargs): # pylint:disable=argu # Add _structlog_to_sync_log_entry as a processor for structlog calls from DiffSync structlog.configure( - processors=[self._structlog_to_sync_log_entry, structlog.stdlib.render_to_log_kwargs], + processors=[ + self._structlog_to_sync_log_entry, + structlog.stdlib.render_to_log_kwargs, + ], context_class=dict, logger_factory=structlog.stdlib.LoggerFactory(), wrapper_class=structlog.stdlib.BoundLogger, diff --git a/nautobot_ssot/jobs/examples.py b/nautobot_ssot/jobs/examples.py index f206ad410..2c460d2f6 100644 --- a/nautobot_ssot/jobs/examples.py +++ b/nautobot_ssot/jobs/examples.py @@ -26,6 +26,7 @@ from nautobot.tenancy.models import Tenant from nautobot_ssot.contrib import NautobotAdapter, NautobotModel +from nautobot_ssot.exceptions import MissingSecretsGroupException from nautobot_ssot.jobs.base import DataMapping, DataSource, DataTarget from nautobot_ssot.tests.contrib_base_classes import ContentTypeDict @@ -34,10 +35,6 @@ name = "SSoT Examples" # pylint: disable=invalid-name -class MissingSecretsGroupException(Exception): - """Custom Exception in case SecretsGroup is not found on ExternalIntegration.""" - - class LocationTypeModel(NautobotModel): """Shared data model representing a LocationType in either of the local or remote Nautobot instances.""" diff --git a/nautobot_ssot/migrations/0006_ssotservicenowconfig.py b/nautobot_ssot/migrations/0006_ssotservicenowconfig.py index bb81d433e..52c729307 100644 --- a/nautobot_ssot/migrations/0006_ssotservicenowconfig.py +++ b/nautobot_ssot/migrations/0006_ssotservicenowconfig.py @@ -67,5 +67,5 @@ class Migration(migrations.Migration): "abstract": False, }, ), - migrations.RunPython(_move_data), + migrations.RunPython(code=_move_data, reverse_code=migrations.RunPython.noop), ] diff --git a/nautobot_ssot/tests/bootstrap/__init__.py b/nautobot_ssot/tests/bootstrap/__init__.py new file mode 100644 index 000000000..49cf95dd0 --- /dev/null +++ b/nautobot_ssot/tests/bootstrap/__init__.py @@ -0,0 +1 @@ +"""Unit tests for nautobot_ssot_bootstrap plugin.""" diff --git a/nautobot_ssot/tests/bootstrap/fixtures/develop.json b/nautobot_ssot/tests/bootstrap/fixtures/develop.json new file mode 100644 index 000000000..2196ce74b --- /dev/null +++ b/nautobot_ssot/tests/bootstrap/fixtures/develop.json @@ -0,0 +1,3 @@ +{ + "git_branch": "develop" +} \ No newline at end of file diff --git a/nautobot_ssot/tests/bootstrap/fixtures/global_settings.json b/nautobot_ssot/tests/bootstrap/fixtures/global_settings.json new file mode 100644 index 000000000..68eb82261 --- /dev/null +++ b/nautobot_ssot/tests/bootstrap/fixtures/global_settings.json @@ -0,0 +1,757 @@ +{ + "tenant_group": [ + { + "name": "Group1", + "parent": "", + "description": "" + }, + { + "name": "Group2", + "parent": "", + "description": "" + }, + { + "name": "Group3", + "parent": "Group1", + "description": "" + } + ], + "tenant": [ + { + "name": "Backbone", + "tenant_group": "Group1", + "description": "", + "tags": [] + }, + { + "name": "Datacenter", + "tenant_group": "Group2", + "description": "", + "tags": ["Test"] + } + ], + "role": [ + { + "name": "spine_switches", + "description": "", + "color": "795548", + "content_types": [ + "dcim.device" + ] + }, + { + "name": "leaf_switches", + "description": "", + "color": "785530", + "content_types": [ + "dcim.device" + ] + }, + { + "name": "Switch", + "description": "", + "color": "9e9e9e", + "content_types": [ + "dcim.device" + ] + }, + { + "name": 
"Firewall", + "description": "", + "color": "9e9e9e", + "content_types": [ + "dcim.device" + ] + }, + { + "name": "Data Network", + "description": "", + "color": "9e9e9e", + "content_types": [ + "ipam.prefix", + "ipam.vlan" + ] + }, + { + "name": "Administrative", + "description": "Unit plays an administrative role", + "color": "2196f3", + "content_types": [ + "extras.contactassociation" + ] + }, + { + "name": "Anycast", + "description": "", + "color": "ffc107", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "Billing", + "description": "Unit plays a billing role", + "color": "4caf50", + "content_types": [ + "extras.contactassociation" + ] + }, + { + "name": "CARP", + "description": "", + "color": "4caf50", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "GLBP", + "description": "", + "color": "4caf50", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "HSRP", + "description": "", + "color": "4caf50", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "Loopback", + "description": "", + "color": "9e9e9e", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "On Site", + "description": "Unit plays an on site role", + "color": "111111", + "content_types": [ + "extras.contactassociation" + ] + }, + { + "name": "Secondary", + "description": "", + "color": "2196f3", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "Support", + "description": "Unit plays a support role", + "color": "ffeb3b", + "content_types": [ + "extras.contactassociation" + ] + }, + { + "name": "VIP", + "description": "", + "color": "4caf50", + "content_types": [ + "ipam.ipaddress" + ] + }, + { + "name": "VRRP", + "description": "", + "color": "4caf50", + "content_types": [ + "ipam.ipaddress" + ] + } + ], + "manufacturer": [ + { + "name": "Generic", + "description": "For generic devices like patch panels" + }, + { + "name": "Palo Alto Networks", + "description": "" + }, + { + "name": "Arista", + "description": "" + }, + { + "name": "Cisco", + "description": "" + } + ], + "platform": [ + { + "name": "paloalto_panos", + "manufacturer": "Palo Alto Networks", + "network_driver": "paloalto_panos", + "napalm_driver": "", + "napalm_arguments": {}, + "description": "PanOS Firewalls" + }, + { + "name": "cisco_ios", + "manufacturer": "Cisco", + "network_driver": "cisco_ios", + "napalm_driver": "", + "napalm_arguments": {}, + "description": "Cisco Devices" + }, + { + "name": "arista_eos", + "manufacturer": "Arista", + "network_driver": "arista_eos", + "napalm_driver": "", + "napalm_arguments": {}, + "description": "Arista Devices" + } + ], + "location_type": [ + { + "name": "Region", + "nestable": true, + "description": "", + "content_types": [] + }, + { + "name": "Site", + "parent": "Region", + "nestable": false, + "description": "", + "content_types": ["dcim.device", "ipam.namespace", "ipam.prefix", "ipam.vlan", "ipam.vlangroup", "circuits.circuittermination"] + }, + { + "name": "Building", + "parent": "Site", + "nestable": false, + "description": "", + "content_types": ["dcim.device", "ipam.namespace", "ipam.prefix", "ipam.vlan", "ipam.vlangroup", "circuits.circuittermination"] + } + ], + "location": [ + { + "name": "Southeast", + "location_type": "Region", + "status": "Active", + "facility": "", + "time_zone": "US/Eastern", + "description": "", + "physical_address": "", + "shipping_address": "", + "contact_name": "", + "contact_phone": "", + "contact_email": "", + "tags": [] + }, + { + "name": "Atlanta", + "location_type": "Site", + "parent": "Southeast", 
+ "status": "Active", + "facility": "AT1", + "asn": 65001, + "time_zone": "US/Eastern", + "description": "", + "physical_address": "180 Peachtree St NE\nFL 2 , FL 3 , FL 6\nAtlanta, GA 30303\nUnited States\n", + "shipping_address": "Example Company\n180 Peachtree St NE\nLoading Dock 1\nAtlanta, GA 30303\nUnited States\n", + "contact_name": "", + "contact_phone": "", + "contact_email": "", + "tags": [] + }, + { + "name": "Atlanta4", + "location_type": "Site", + "parent": "Southeast", + "status": "Active", + "facility": "AT4", + "asn": 65004, + "time_zone": "US/Eastern", + "description": "", + "physical_address": "450 Interstate to N PKWY\nAtlanta, GA 30339\nUnited States\n", + "shipping_address": "Example Company\n450 Interstate to N PKWY\nLoading Dock 1\nAtlanta, GA 30339\nUnited States\n", + "contact_name": "", + "contact_phone": "", + "contact_email": "", + "tags": [] + } + ], + "team": [ + { + "name": "Datacenter", + "phone": "123-456-7890", + "email": "datacenter@example.com", + "address": "2715 N Vermont Canyon Rd, Los Angeles, CA 90027" + }, + { + "name": "Backbone", + "phone": "123-456-7890", + "email": "backbone@example.com", + "address": "1600 S Azusa Ave, Rowland Heights, CA 91748" + } + ], + "contact": [ + { + "name": "Jennifer Parker", + "phone": "888-555-4823", + "email": "jenny@future.com", + "address": "12417 Philadelphia St, Whittier, CA 90601", + "teams": ["Backbone", "Datacenter"] + }, + { + "name": "Marty McFly", + "phone": "888-555-1955", + "email": "marty@future.com", + "address": "9303 Roslyndale Ave, Arleta, CA 91331", + "teams": ["Backbone"] + } + ], + "provider": [ + { + "name": "Provider1", + "asn": 65000, + "account_number": "12345678", + "portal_url": "https://provider1.com", + "noc_contact": "", + "admin_contact": "", + "tags": [] + }, + { + "name": "Provider2", + "asn": 65001, + "account_number": "87654321", + "portal_url": "https://provider2.com", + "noc_contact": "", + "admin_contact": "", + "tags": [] + } + ], + "provider_network": [ + { + "name": "Provider1 Metro-E", + "provider": "Provider1", + "description": "", + "comments": "", + "tags": [] + }, + { + "name": "Provider2 Metro-E", + "provider": "Provider2", + "description": "", + "comments": "", + "tags": [] + } + ], + "circuit_type": [ + { + "name": "Metro-E", + "description": "Metro ethernet" + }, + { + "name": "DWDM", + "description": "" + }, + { + "name": "Internet", + "description": "" + } + ], + "circuit": [ + { + "circuit_id": "METRO-65002-CUST1", + "provider": "Provider1", + "circuit_type": "Metro-E", + "status": "Active", + "commit_rate_kbps": 1000000, + "description": "", + "terminations": ["METRO-65002-CUST1__Provider1__A__A__METRO-65002-CUST1", "METRO-65002-CUST1__Provider1__Z__Z__METRO-65002-CUST1"], + "tags": [] + }, + { + "circuit_id": "INTERNET-65002-CUST1", + "provider": "Provider2", + "circuit_type": "Internet", + "status": "Active", + "commit_rate_kbps": 1000000, + "description": "", + "terminations": ["INTERNET-65002-CUST1__Provider2__A__A__INTERNET-65002-CUST1"], + "tags": [] + } + ], + "circuit_termination": [ + { + "name": "METRO-65002-CUST1__Provider1__A", + "circuit_id": "METRO-65002-CUST1", + "termination_type": "Location", + "location": "Atlanta", + "termination_side": "A", + "port_speed_kbps": 1000000, + "cross_connect_id": "", + "patch_panel_or_ports": "", + "description": "", + "tags": [] + }, + { + "name": "METRO-65002-CUST1__Provider1__Z", + "circuit_id": "METRO-65002-CUST1", + "termination_type": "Provider Network", + "provider_network": "Provider2 Metro-E", + 
"termination_side": "Z", + "port_speed_kbps": 1000000, + "cross_connect_id": "", + "patch_panel_or_ports": "", + "description": "", + "tags": [] + }, + { + "name": "INTERNET-65002-CUST1__Provider2__A", + "circuit_id": "INTERNET-65002-CUST1", + "termination_type": "Location", + "location": "Atlanta4", + "termination_side": "A", + "port_speed_kbps": 1000000, + "cross_connect_id": "", + "patch_panel_or_ports": "", + "description": "", + "tags": [] + } + ], + "secret": [ + { + "name": "Github_Service_Acct", + "provider": "environment-variable", + "parameters": { + "variable": "GITHUB_SERVICE_ACCT", + "path": null + } + }, + { + "name": "Github_Service_Token", + "provider": "environment-variable", + "parameters": { + "variable": "GITHUB_SERVICE_TOKEN", + "path": null + } + } + ], + "secrets_group": [ + { + "name": "Github_Service_Account", + "secrets": [ + { + "name": "Github_Service_Acct", + "secret_type": "username", + "access_type": "HTTP(S)" + }, + { + "name": "Github_Service_Token", + "secret_type": "token", + "access_type": "HTTP(S)" + } + ] + } + ], + "git_repository": [ + { + "name": "Backbone Config Contexts", + "url": "https://github.com/nautobot/backbone-config-contexts.git", + "branch": "main", + "secrets_group": "Github_Service_Account", + "provided_contents": ["config contexts"] + }, + { + "name": "Datacenter Config Contexts", + "url": "https://github.com/nautobot/datacenter-config-contexts.git", + "branch": "develop", + "secrets_group": "Github_Service_Account", + "provided_contents": ["config contexts"] + }, + { + "name": "Metro Config Contexts", + "url": "https://github.com/nautobot/metro-config-contexts.git", + "branch": "develop", + "provided_contents": ["config contexts"] + }, + { + "name": "Access Config Contexts", + "url": "https://github.com/nautobot/access-config-contexts.git", + "branch": "develop", + "provided_contents": ["config contexts"] + } + ], + "dynamic_group": [ + { + "name": "Backbone Domain", + "content_type": "dcim.device", + "dynamic_filter": { + "tenant": ["Backbone"] + }, + "description": "" + }, + { + "name": "Datacenter", + "content_type": "dcim.device", + "dynamic_filter": { + "location": ["Atlanta"], + "platform": ["arista_eos", "paloalto_panos"] + }, + "description": "" + } + ], + "computed_field": [ + { + "label": "Compliance Change", + "content_type": "dcim.device", + "template": "{{ obj | get_change_log }}" + } + ], + "tag": [ + { + "name": "Backbone", + "color": "795547", + "content_types": ["dcim.device"], + "description": "" + }, + { + "name": "Access", + "color": "795548", + "content_types": ["dcim.device", "ipam.ipaddress"], + "description": "" + }, + { + "name": "Test", + "color": "795548", + "content_types": [ + "circuits.circuit", + "circuits.circuittermination", + "circuits.provider", + "circuits.providernetwork", + "dcim.cable", + "dcim.consoleport", + "dcim.consoleserverport", + "dcim.device", + "dcim.devicebay", + "dcim.deviceredundancygroup", + "dcim.devicetype", + "dcim.frontport", + "dcim.interface", + "dcim.inventoryitem", + "dcim.location", + "dcim.powerfeed", + "dcim.poweroutlet", + "dcim.powerpanel", + "dcim.powerport", + "dcim.rack", + "dcim.rackreservation", + "dcim.rearport", + "extras.gitrepository", + "extras.job", + "extras.secret", + "ipam.ipaddress", + "ipam.namespace", + "ipam.prefix", + "ipam.routetarget", + "ipam.service", + "ipam.vlan", + "ipam.vrf", + "tenancy.tenant", + "virtualization.cluster", + "virtualization.virtualmachine", + "virtualization.vminterface" + ], + "description": "Test" + } + ], + 
"graph_ql_query": [ + { + "name": "Backbone Devices", + "query": "query ($device_id: ID!) {\n device(id: $device_id) {\n config_context\n hostname: name\n device_role {\n name\n }\n tenant {\n name\n }\n primary_ip4 {\n address\n }\n }\n}\n" + }, + { + "name": "Datacenter Devices", + "query": "query ($device_id: ID!) {\n device(id: $device_id) {\n config_context\n hostname: name\n device_role {\n name\n }\n tenant {\n name\n }\n primary_ip4 {\n address\n }\n }\n}\n" + } + ], + "namespace": [ + { + "name": "Global", + "location": "", + "description": "" + }, + { + "name": "Customer1", + "description": "Customer1 IPAM Namespace", + "location": "Atlanta" + }, + { + "name": "Customer2", + "description": "Customer2 IPAM Namespace", + "location": "Atlanta4" + } + ], + "rir": [ + { + "name": "RFC1918", + "private": true, + "description": "Private IP Space" + }, + { + "name": "ARIN", + "private": false, + "description": "American Registry for Internet Numbers" + } + ], + "vlan_group": [ + { + "name": "Atlanta VLANs", + "location": "Atlanta", + "description": "" + }, + { + "name": "Atlanta4 VLANs", + "location": "Atlanta4", + "description": "" + } + ], + "vlan": [ + { + "name": "vlan10", + "vid": 10, + "description": "", + "status": "Active", + "role": "Data Network", + "locations": ["Atlanta"], + "vlan_group": "Atlanta VLANs", + "tags": [] + }, + { + "name": "vlan20", + "vid": 20, + "description": "", + "status": "Reserved", + "role": "Data Network", + "locations": ["Atlanta", "Atlanta4"], + "vlan_group": "Atlanta VLANs", + "tags": [] + }, + { + "name": "vlan30", + "vid": 30, + "description": "", + "status": "Reserved", + "role": "Data Network", + "locations": [], + "vlan_group": "Atlanta VLANs", + "tags": [] + }, + { + "name": "vlan30", + "vid": 30, + "description": "", + "status": "Active", + "locations": [], + "tags": [] + } + ], + "vrf": [ + { + "name": "blue", + "namespace": "Global", + "route_distinguisher": "65000:1", + "description": "", + "tenant": "", + "tags": [] + }, + { + "name": "red", + "namespace": "Global", + "route_distinguisher": "65000:2", + "description": "", + "tenant": "", + "tags": [] + }, + { + "name": "blue", + "namespace": "Customer1", + "route_distinguisher": "65000:1", + "description": "", + "tenant": "", + "tags": [] + } + ], + "prefix": [ + { + "network": "10.0.0.0/24", + "namespace": "Customer1", + "prefix_type": "network", + "status": "Active", + "role": "Data Network", + "rir": "RFC1918", + "date_allocated": "2024-06-01 00:00:00", + "description": "", + "tags": [] + }, + { + "network": "10.0.0.0/24", + "namespace": "Customer2", + "prefix_type": "network", + "status": "Active", + "role": "Data Network", + "rir": "RFC1918", + "date_allocated": "2024-06-01 12:00:00", + "description": "", + "tags": [] + }, + { + "network": "10.0.10.0/24", + "namespace": "Global", + "prefix_type": "network", + "status": "Active", + "role": "Data Network", + "rir": "RFC1918", + "description": "", + "locations": ["Atlanta", "Atlanta4"], + "vlan": "vlan10__10__Atlanta VLANs", + "tags": [] + }, + { + "network": "192.168.0.0/24", + "namespace": "Customer1", + "prefix_type": "network", + "status": "Active", + "role": "Data Network", + "rir": "RFC1918", + "description": "", + "vrfs": ["blue__Customer1"], + "locations": ["Atlanta"], + "tags": [] + }, + { + "network": "192.168.0.0/24", + "namespace": "Global", + "prefix_type": "network", + "status": "Active", + "role": "Data Network", + "rir": "RFC1918", + "description": "", + "vrfs": ["red__Global"], + "locations": ["Atlanta"], + "tags": [] + 
},
+    {
+      "network": "192.168.1.0/24",
+      "namespace": "Global",
+      "prefix_type": "network",
+      "status": "Active",
+      "description": "",
+      "tags": []
+    }
+  ]
+}
diff --git a/nautobot_ssot/tests/bootstrap/fixtures/production.json b/nautobot_ssot/tests/bootstrap/fixtures/production.json
new file mode 100644
index 000000000..bb7d95b8a
--- /dev/null
+++ b/nautobot_ssot/tests/bootstrap/fixtures/production.json
@@ -0,0 +1,3 @@
+{
+    "git_branch": "production"
+}
\ No newline at end of file
diff --git a/nautobot_ssot/tests/bootstrap/test_bootstrap_adapter.py b/nautobot_ssot/tests/bootstrap/test_bootstrap_adapter.py
new file mode 100644
index 000000000..6ac8712bc
--- /dev/null
+++ b/nautobot_ssot/tests/bootstrap/test_bootstrap_adapter.py
@@ -0,0 +1,157 @@
+"""Tests for Bootstrap adapter."""
+
+import json
+from datetime import datetime
+from unittest.mock import MagicMock
+
+import yaml
+from deepdiff import DeepDiff
+from nautobot.core.testing import TransactionTestCase
+from nautobot.extras.models import JobResult
+
+from nautobot_ssot.integrations.bootstrap.diffsync.adapters.bootstrap import (
+    BootstrapAdapter,
+)
+from nautobot_ssot.integrations.bootstrap.jobs import BootstrapDataSource
+
+from .test_setup import (
+    DEVELOP_YAML_SETTINGS,
+    GLOBAL_JSON_SETTINGS,
+    GLOBAL_YAML_SETTINGS,
+    MODELS_TO_SYNC,
+)
+
+
+def load_yaml(path):
+    """Load a yaml file."""
+    with open(path, encoding="utf-8") as file:
+        return yaml.safe_load(file.read())
+
+
+def load_json(path):
+    """Load a json file."""
+    with open(path, encoding="utf-8") as file:
+        return json.loads(file.read())
+
+
+def assert_deep_diff(test_case, actual, expected, keys_to_normalize=None):
+    # pylint: disable=duplicate-code
+    """Custom DeepDiff assertion handling."""
+    keys_to_normalize = keys_to_normalize or {}
+
+    def normalize(item):  # pylint: disable=too-many-branches
+        if isinstance(item, list):
+            return [normalize(i) for i in item]
+        if isinstance(item, dict):
+            for key in list(item.keys()):
+                if key in ["system_of_record", "model_flags", "uuid"]:
+                    item.pop(key, None)
+                elif key in keys_to_normalize and (item.get(key) is None or item.get(key) == ""):
+                    item[key] = None
+                if (
+                    key
+                    in [
+                        "weight",
+                        "parent",
+                        "date_installed",
+                        "asn",
+                        "latitude",
+                        "longitude",
+                        "tenant",
+                        "terminations",
+                    ]
+                    and item.get(key) is None
+                ):
+                    item.pop(key, None)
+                if key == "parameters":
+                    if "path" not in item[key]:
+                        item[key]["path"] = None
+                if key == "path" and item.get(key) is None:
+                    item[key] = None
+                if key in ("content_types", "provided_contents") and isinstance(item[key], list):
+                    item[key] = sorted(["config contexts" if v == "extras.configcontext" else v for v in item[key]])
+                if key == "date_allocated":
+                    if item.get(key) is not None:
+                        # Normalize the format to 'YYYY-MM-DD HH:MM:SS' for consistency
+                        if isinstance(item[key], datetime):
+                            item[key] = item[key].isoformat(sep=" ")
+                        elif isinstance(item[key], str) and len(item[key]) == 10:
+                            # Convert 'YYYY-MM-DD' format to 'YYYY-MM-DD 00:00:00'
+                            item[key] += " 00:00:00"
+                if key == "prefix":
+                    # Sort prefixes based on network and namespace as unique identifiers
+                    item[key] = sorted(item[key], key=lambda x: (x["network"], x["namespace"]))
+            return {k: normalize(v) for k, v in item.items()}  # pylint: disable=duplicate-code
+        return item
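
As an illustration of the inner `normalize` helper above (record contents assumed; in the tests it only runs via `assert_deep_diff`): bookkeeping keys are stripped so DeepDiff compares only meaningful fields.

    normalize({"name": "HQ", "uuid": "1234-abcd", "system_of_record": "Bootstrap"})
    # -> {"name": "HQ"}

+
+    actual_normalized = normalize(actual)
+    expected_normalized = normalize(expected)
+
+    diff = DeepDiff(
+        actual_normalized,
+        expected_normalized,
+        ignore_order=True,
+        ignore_string_type_changes=True,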
+        exclude_regex_paths=r"root\[\d+\]\['terminations'\]",
+    )
+
+    print("Actual Normalization", actual_normalized)
+    print("Expected Normalization", expected_normalized)
+
+    if diff:
+        print("Differences found:")
+        print(diff)
+
+    test_case.assertEqual(diff, {})
+
+
+class TestBootstrapAdapterTestCase(TransactionTestCase):
+    """Test NautobotSsotBootstrapAdapter class."""
+
+    databases = ("default", "job_logs")
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.maxDiff = None
+
+    def setUp(self):
+        """Initialize test case."""
+        self.job = BootstrapDataSource()
+        self.job.job_result = JobResult.objects.create(
+            name=self.job.class_path, task_name="fake task", worker="default"
+        )
+
+        self.bootstrap_client = MagicMock()
+        self.bootstrap_client.get_global_settings.return_value = GLOBAL_YAML_SETTINGS
+        self.bootstrap_client.get_develop_settings.return_value = DEVELOP_YAML_SETTINGS
+        self.bootstrap_client.get_production_settings.return_value = GLOBAL_YAML_SETTINGS
+
+        self.bootstrap = BootstrapAdapter(job=self.job, sync=None, client=self.bootstrap_client)
+
+    def test_develop_settings(self):
+        self.assertEqual(self.bootstrap_client.get_develop_settings(), DEVELOP_YAML_SETTINGS)
+
+    def test_production_settings(self):
+        self.assertEqual(self.bootstrap_client.get_production_settings(), GLOBAL_YAML_SETTINGS)
+
+    def test_data_loading(self):
+        """Test Nautobot Ssot Bootstrap load() function."""
+        self.bootstrap.load()
+        # pylint: disable=duplicate-code
+        for key in MODELS_TO_SYNC:
+            print(f"Checking: {key}")
+            assert_deep_diff(
+                self,
+                list(self.bootstrap.dict().get(key, {}).values()),
+                GLOBAL_JSON_SETTINGS.get(key, []),
+                keys_to_normalize={
+                    "parent",
+                    "nestable",
+                    "tenant",
+                    "tenant_group",
+                    "terminations",
+                    "provider_network",
+                    "upstream_speed_kbps",
+                    "location",
+                },
+            )
diff --git a/nautobot_ssot/tests/bootstrap/test_nautobot_adapter.py b/nautobot_ssot/tests/bootstrap/test_nautobot_adapter.py
new file mode 100644
index 000000000..04cba638a
--- /dev/null
+++ b/nautobot_ssot/tests/bootstrap/test_nautobot_adapter.py
@@ -0,0 +1,127 @@
+"""Testing that objects are properly loaded from Nautobot into the Nautobot adapter."""
+
+# test_nautobot_adapter.py
+
+from datetime import datetime
+
+from deepdiff import DeepDiff
+from django.test import TransactionTestCase
+
+from .test_setup import (
+    GLOBAL_JSON_SETTINGS,
+    MODELS_TO_SYNC,
+    NautobotTestSetup,
+)
+
+
+def assert_nautobot_deep_diff(test_case, actual, expected, keys_to_normalize=None):
+    # pylint: disable=duplicate-code
+    """Custom DeepDiff assertion handling."""
+    keys_to_normalize = keys_to_normalize or {}
+
+    def normalize(item, key=None):
+        if isinstance(item, list):
+            if key == "vrf":
+                return sorted(
+                    [normalize(i, key) for i in item],
+                    key=lambda x: (x.get("name", ""), x.get("namespace", "")),
+                )
+            return [normalize(i, key) for i in item]
+
+        if isinstance(item, dict):
+            for item_key in list(item.keys()):
+                if item_key in ["system_of_record", "model_flags", "uuid"]:
+                    item.pop(item_key, None)
+                elif item_key in ["secrets_group"] and "secrets_group" not in item:
+                    item[item_key] = None
+                elif item_key in keys_to_normalize and (item.get(item_key) is None or item.get(item_key) == ""):
+                    item[item_key] = None
+
+                if (
+                    item_key
+                    in [
+                        "weight",
+                        "parent",
+                        "date_installed",
+                        "asn",
+                        "latitude",
+                        "longitude",
+                        "tenant",
+                        "terminations",
+                    ]
+                    and item.get(item_key) is None
+                ):
+                    item.pop(item_key, None)
+
+                if item_key in ("content_types", "provided_contents") and isinstance(item[item_key], list):
"provided_contents" and isinstance(item[item_key], list): + item[item_key] = sorted(item[item_key]) + + if item_key == "date_allocated" and not item.get(item_key): + item.pop(item_key, None) + + if item_key == "parameters" and "path" not in item: + item["path"] = None + + if isinstance(item.get(item_key), datetime): + item[item_key] = item[item_key].isoformat(sep=" ") + + return {k: normalize(v, k) for k, v in item.items()} + return item + + actual_normalized = normalize(actual) + expected_normalized = normalize(expected) + + diff = DeepDiff( + actual_normalized, + expected_normalized, + ignore_order=True, + ignore_string_type_changes=True, + exclude_regex_paths=r"root\[\d+\]\['terminations'\]", + ) + + print("Actual Normalization", actual_normalized) + print("Expected Normalization", expected_normalized) + + if diff: + print("Differences found:") + print(diff) + + test_case.assertEqual(diff, {}) + + +class TestNautobotAdapterTestCase(TransactionTestCase): + """Test NautobotAdapter class.""" + + databases = ("default", "job_logs") + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + # self.maxDiff = None + + def setUp(self): + """Initialize test case.""" + super().setUp() + self.setup = NautobotTestSetup() + self.nb_adapter = self.setup.nb_adapter + + def test_data_loading(self): + """Test SSoT Bootstrap Nautobot load() function.""" + self.nb_adapter.load() + # self.maxDiff = None + # pylint: disable=duplicate-code + for key in MODELS_TO_SYNC: + print(f"Checking: {key}") + assert_nautobot_deep_diff( + self, + list(self.nb_adapter.dict().get(key, {}).values()), + GLOBAL_JSON_SETTINGS.get(key, []), + keys_to_normalize={ + "parent", + "nestable", + "tenant", + "tenant_group", + "terminations", + "provider_network", + "upstream_speed_kbps", + "location", + }, + ) diff --git a/nautobot_ssot/tests/bootstrap/test_setup.py b/nautobot_ssot/tests/bootstrap/test_setup.py new file mode 100644 index 000000000..8a56c11e7 --- /dev/null +++ b/nautobot_ssot/tests/bootstrap/test_setup.py @@ -0,0 +1,959 @@ +# pylint: disable=too-many-lines +"""Setup/Create Nautobot objects for use in unit testing.""" + +import json +import os +from unittest.mock import MagicMock + +import pytz +import yaml +from django.contrib.contenttypes.models import ContentType +from django.utils.text import slugify +from nautobot.circuits.models import ( + Circuit, + CircuitTermination, + CircuitType, + Provider, + ProviderNetwork, +) +from nautobot.dcim.models import ( + Device, + DeviceType, + InventoryItem, + Location, + LocationType, + Manufacturer, + Platform, +) +from nautobot.extras.models import ( + ComputedField, + Contact, + DynamicGroup, + GitRepository, + GraphQLQuery, + JobResult, + Role, + Secret, + SecretsGroup, + SecretsGroupAssociation, + Status, + Tag, + Team, +) +from nautobot.ipam.models import RIR, VLAN, VRF, Namespace, Prefix, VLANGroup +from nautobot.tenancy.models import Tenant, TenantGroup +from nautobot_device_lifecycle_mgmt.models import ( + SoftwareImageLCM, + SoftwareLCM, + ValidatedSoftwareLCM, +) + +from nautobot_ssot.integrations.bootstrap.diffsync.adapters.bootstrap import ( + BootstrapAdapter, +) +from nautobot_ssot.integrations.bootstrap.diffsync.adapters.nautobot import ( + NautobotAdapter, +) +from nautobot_ssot.integrations.bootstrap.jobs import BootstrapDataSource + + +def load_yaml(path): + """Load a yaml file.""" + with open(path, encoding="utf-8") as file: + return yaml.safe_load(file.read()) + + +def load_json(path): + """Load a json file.""" + with open(path, 
encoding="utf-8") as file: + return json.loads(file.read()) + + +FIXTURES_DIR = os.path.join("./nautobot_ssot/integrations/bootstrap/fixtures") +GLOBAL_YAML_SETTINGS = load_yaml(os.path.join(FIXTURES_DIR, "global_settings.yml")) +DEVELOP_YAML_SETTINGS = load_yaml(os.path.join(FIXTURES_DIR, "develop.yml")) + +TESTS_FIXTURES_DIR = os.path.join("./nautobot_ssot/tests/bootstrap/fixtures") +GLOBAL_JSON_SETTINGS = load_json(os.path.join(TESTS_FIXTURES_DIR, "global_settings.json")) + +MODELS_TO_SYNC = [ + "tenant_group", + "tenant", + "role", + "manufacturer", + "platform", + "location_type", + "location", + "team", + "contact", + "provider", + "provider_network", + "circuit_type", + "circuit", + "circuit_termination", + "secret", + "secrets_group", + "git_repository", + "dynamic_group", + "computed_field", + "tag", + "graph_ql_query", + "software", + "software_image", + "validated_software", + "namespace", + "rir", + "vlan_group", + "vlan", + "vrf", + "prefix", +] + + +def is_valid_timezone(timezone): + """Return whether timezone passed is a valid timezone in pytz.""" + try: + pytz.timezone(timezone) + return True + except pytz.UnknownTimeZoneError: + return False + + +class PrefixInfo: + """Definition for a prefix object""" + + def __init__(self, namespace, prefix_type, role, rir, vlan, tenant): # pylint: disable=too-many-arguments + self.namespace = namespace + self.prefix_type = prefix_type + self.role = role + self.rir = rir + self.vlan = vlan + self.tenant = tenant + + +class NautobotTestSetup: + """Setup basic database information to be used in other tests.""" + + def __init__(self): + self.job = BootstrapDataSource() + self.job.job_result = JobResult.objects.create( + name=self.job.class_path, task_name="fake task", worker="default" + ) + self.nb_adapter = NautobotAdapter(job=self.job, sync=None) + self.nb_adapter.job = MagicMock() + self.nb_adapter.job.logger.info = MagicMock() + self.bs_adapter = BootstrapAdapter(job=self.job, sync=None) + self.bs_adapter.job = MagicMock() + self.bs_adapter.job.logger.info = MagicMock() + self.status_active = None + self._initialize_data() + + def _initialize_data(self): + self._setup_tags() + self._setup_status() + self._setup_locations() + self._setup_tenant_groups() + self._setup_tenants() + self._setup_roles() + self._setup_teams() + self._setup_contacts() + self._setup_providers() + self._setup_provider_networks() + self._setup_circuit_types() + self._setup_circuits() + self._setup_circuit_terminations() + self._setup_manufacturers() + self._setup_platforms() + self._setup_device_types_and_devices() + self._setup_inventory_items() + self._setup_secrets_and_groups() + self._setup_computed_fields() + self._setup_graphql_queries() + self._setup_git_repositories() + self._setup_dynamic_groups() + self._setup_namespaces() + self._setup_rirs() + self._setup_vlan_groups() + self._setup_vlans() + self._setup_vrfs() + self._setup_prefixes() + self._setup_software_and_images() + self._setup_validated_software() + + def _setup_tags(self): + for _tag in GLOBAL_YAML_SETTINGS["tag"]: + _content_types = [] + for _con_type in _tag["content_types"]: + _content_types.append( + ContentType.objects.get(app_label=_con_type.split(".")[0], model=_con_type.split(".")[1]) + ) + _new_tag = Tag.objects.create( + name=_tag["name"], + description=_tag["description"], + color=_tag["color"] if not None else "9e9e9e", + ) + _new_tag.custom_field_data["system_of_record"] = "Bootstrap" + _new_tag.validated_save() + _new_tag.content_types.set(_content_types) + 
_new_tag.validated_save() + _new_tag.refresh_from_db() + + def _setup_status(self): + _statuses = ["Reserved"] + self.status_active, _ = Status.objects.get_or_create(name="Active") + self.status_active.save() + _content_types = [ + "circuits.circuit", + "dcim.location", + "dcim.device", + "ipam.prefix", + "ipam.namespace", + "ipam.vrf", + "ipam.vlan", + "ipam.ipaddress", + ] + for _content_type in _content_types: + _con_type = ContentType.objects.get( + app_label=_content_type.split(".", maxsplit=1)[0], + model=_content_type.split(".")[1], + ) + self.status_active.content_types.add(_con_type) + self.status_active.refresh_from_db() + for _status in _statuses: + status, _ = Status.objects.get_or_create(name=_status) + for _content_type in _content_types: + _con_type = ContentType.objects.get( + app_label=_content_type.split(".", maxsplit=1)[0], + model=_content_type.split(".")[1], + ) + status.content_types.add(_con_type) + status.validated_save() + + def _setup_locations(self): + """Set up location types and locations.""" + + # First, ensure location types are created + location_types_data = GLOBAL_YAML_SETTINGS.get("location_type", []) + for loc_type_data in location_types_data: + location_type = self._get_or_create_location_type(loc_type_data) + self._set_location_type_content_types(location_type, loc_type_data["content_types"]) + + locations_data = GLOBAL_YAML_SETTINGS.get("location", []) + for location_data in locations_data: + location_type = LocationType.objects.get(name=location_data["location_type"]) + parent_location = None + tenant = None + tags = [] + + status = Status.objects.get(name=location_data["status"]) + + if location_data["parent"]: + parent_location = Location.objects.get(name=location_data["parent"]) + + if location_data["tenant"]: + tenant = Tenant.objects.get(name=location_data["tenant"]) + + if location_data["tags"]: + tags = [Tag.objects.get(name=tag) for tag in location_data["tags"]] + + location, created = Location.objects.get_or_create( + name=location_data["name"], + location_type=location_type, + defaults={ + "parent": parent_location, + "status": status, + "facility": location_data.get("facility", ""), + "asn": location_data.get("asn"), + "time_zone": location_data.get("time_zone", ""), + "description": location_data.get("description", ""), + "tenant": tenant, + "physical_address": location_data.get("physical_address", ""), + "shipping_address": location_data.get("shipping_address", ""), + "latitude": location_data.get("latitude"), + "longitude": location_data.get("longitude"), + "contact_name": location_data.get("contact_name", ""), + "contact_phone": location_data.get("contact_phone", ""), + "contact_email": location_data.get("contact_email", ""), + "tags": tags, + }, + ) + if created: + location.validated_save() + + def _get_or_create_location_type(self, location_type_data): + """Get or create a LocationType based on the provided data.""" + parent = self._get_location_type_parent(location_type_data["parent"]) + try: + return LocationType.objects.get(name=location_type_data["name"], parent=parent) + except LocationType.DoesNotExist: + return LocationType.objects.create( + name=location_type_data["name"], + parent=parent, + nestable=location_type_data.get("nestable"), + description=location_type_data["description"], + ) + + def _get_location_type_parent(self, parent_name): + """Retrieve the parent LocationType if it exists.""" + if parent_name: + try: + return LocationType.objects.get(name=parent_name) + except LocationType.DoesNotExist: + 
self.job.logger.warning(f"Parent LocationType '{parent_name}' does not exist.")
+                return None
+        return None
+
+    def _set_location_type_content_types(self, location_type, content_types):
+        """Set the content types for a LocationType."""
+        content_type_objects = [
+            ContentType.objects.get(app_label=ct.split(".")[0], model=ct.split(".")[1]) for ct in content_types
+        ]
+        location_type.content_types.set(content_type_objects)
+        location_type.custom_field_data["system_of_record"] = "Bootstrap"
+        location_type.save()
+
+    def _setup_tenant_groups(self):
+        _tenant_groups = GLOBAL_YAML_SETTINGS.get("tenant_group", [])
+        for _tenant_group in _tenant_groups:
+            if _tenant_group["parent"]:
+                _parent = TenantGroup.objects.get(name=_tenant_group["parent"])
+                _tenant_group = TenantGroup.objects.create(name=_tenant_group["name"], parent=_parent)
+            else:
+                _tenant_group = TenantGroup.objects.create(name=_tenant_group["name"])
+            _tenant_group.custom_field_data["system_of_record"] = "Bootstrap"
+            _tenant_group.validated_save()
+            _tenant_group.refresh_from_db()
+
+    def _setup_tenants(self):
+        _tenants = GLOBAL_YAML_SETTINGS.get("tenant", [])
+        for _ten in _tenants:
+            _tenant_group = None
+            if _ten["tenant_group"]:
+                _tenant_group = TenantGroup.objects.get(name=_ten["tenant_group"])
+            _tenant = Tenant.objects.create(
+                name=_ten["name"],
+                description=_ten["description"],
+                tenant_group=_tenant_group,
+            )
+            _tenant.custom_field_data["system_of_record"] = "Bootstrap"
+            if _ten["tags"]:
+                for _tag in _ten["tags"]:
+                    _tenant.tags.add(Tag.objects.get(name=_tag))
+            _tenant.validated_save()
+            _tenant.refresh_from_db()
+
+    def _setup_roles(self):
+        _con_types = []
+        _roles = GLOBAL_YAML_SETTINGS["role"]
+        for _role in _roles:
+            for _type in _role["content_types"]:
+                _con_types.append(ContentType.objects.get(app_label=_type.split(".")[0], model=_type.split(".")[1]))
+            _r, created = Role.objects.get_or_create(
+                name=_role["name"],
+                color=_role["color"],
+                description=_role["description"],
+            )
+            if created:
+                _r.content_types.set(_con_types)
+                _r.custom_field_data["system_of_record"] = "Bootstrap"
+                _r.validated_save()
+            _con_types.clear()
+
+    def _setup_teams(self):
+        for _team in GLOBAL_YAML_SETTINGS["team"]:
+            team = Team.objects.create(
+                name=_team["name"],
+                phone=_team["phone"],
+                email=_team["email"],
+                address=_team["address"],
+            )
+            team.custom_field_data["system_of_record"] = "Bootstrap"
+            team.validated_save()
+
+    def _setup_contacts(self):
+        for _contact in GLOBAL_YAML_SETTINGS["contact"]:
+            contact = Contact.objects.create(
+                name=_contact["name"],
+                phone=_contact["phone"],
+                email=_contact["email"],
+                address=_contact["address"],
+            )
+            contact.validated_save()
+            for _team in _contact["teams"]:
+                contact.teams.add(Team.objects.get(name=_team))
+            contact.custom_field_data["system_of_record"] = "Bootstrap"
+            contact.validated_save()
+
+    def _setup_providers(self):
+        for _provider in GLOBAL_YAML_SETTINGS["provider"]:
+            provider = Provider.objects.create(
+                name=_provider["name"],
+                asn=_provider["asn"],
+                account=_provider["account_number"],
+                portal_url=_provider["portal_url"],
+                noc_contact=_provider["noc_contact"],
+                admin_contact=_provider["admin_contact"],
+            )
+            provider.validated_save()
+            for _tag in _provider["tags"]:
+                _t = Tag.objects.get(name=_tag)
+                provider.tags.add(_t)
+            provider.custom_field_data["system_of_record"] = "Bootstrap"
+            provider.validated_save()
+
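
Aside on the tag assignments in these setup methods: `tags` on Nautobot records is exposed through a related manager, so membership changes go through `add()`/`set()` rather than list-style `append()`. A one-line sketch (the "Test" tag comes from the fixture data above):

    provider.tags.add(Tag.objects.get(name="Test"))  # related managers expose add()/set(), not append()
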
+    def _setup_provider_networks(self):
+        for _provider_network in GLOBAL_YAML_SETTINGS["provider_network"]:
+            _provider = Provider.objects.get(name=_provider_network["provider"])
+            provider_network = ProviderNetwork.objects.create(
+                name=_provider_network["name"],
+                provider=_provider,
+                description=_provider_network["description"],
+                comments=_provider_network["comments"],
+            )
+            provider_network.validated_save()
+            for _tag in _provider_network["tags"]:
+                _t = Tag.objects.get(name=_tag)
+                provider_network.tags.add(_t)
+            provider_network.custom_field_data["system_of_record"] = "Bootstrap"
+            provider_network.validated_save()
+
+    def _setup_circuit_types(self):
+        for _circuit_type in GLOBAL_YAML_SETTINGS["circuit_type"]:
+            circuit_type = CircuitType(
+                name=_circuit_type["name"],
+                description=_circuit_type["description"],
+            )
+            circuit_type.custom_field_data["system_of_record"] = "Bootstrap"
+            circuit_type.validated_save()
+
+    def _setup_circuits(self):
+        for _circuit in GLOBAL_YAML_SETTINGS["circuit"]:
+            _provider = Provider.objects.get(name=_circuit["provider"])
+            _circuit_type = CircuitType.objects.get(name=_circuit["circuit_type"])
+            _status = Status.objects.get(name=_circuit["status"])
+            _tenant = None
+            if _circuit["tenant"] is not None:
+                _tenant = Tenant.objects.get(name=_circuit["tenant"])
+            circuit = Circuit(
+                cid=_circuit["circuit_id"],
+                provider=_provider,
+                circuit_type=_circuit_type,
+                status=_status,
+                commit_rate=_circuit["commit_rate_kbps"],
+                description=_circuit["description"],
+                tenant=_tenant,
+            )
+            circuit.validated_save()
+            for _tag in _circuit["tags"]:
+                _t = Tag.objects.get(name=_tag)
+                circuit.tags.add(_t)
+            circuit.custom_field_data["system_of_record"] = "Bootstrap"
+            circuit.validated_save()
+
+    def _setup_circuit_terminations(self):
+        for _circuit_termination in GLOBAL_YAML_SETTINGS["circuit_termination"]:
+            _name_parts = _circuit_termination["name"].split("__", 2)
+            _circuit_id = _name_parts[0]
+            _provider_name = _name_parts[1]
+            _term_side = _name_parts[2]
+            _provider = Provider.objects.get(name=_provider_name)
+            _circuit = Circuit.objects.get(cid=_circuit_id, provider=_provider)
+
+            if _circuit_termination["termination_type"] == "Provider Network":
+                _provider_network = ProviderNetwork.objects.get(name=_circuit_termination["provider_network"])
+                circuit_termination = CircuitTermination.objects.create(
+                    provider_network=_provider_network,
+                    circuit=_circuit,
+                    term_side=_term_side,
+                    xconnect_id=_circuit_termination["cross_connect_id"],
+                    pp_info=_circuit_termination["patch_panel_or_ports"],
+                    description=_circuit_termination["description"],
+                    upstream_speed=_circuit_termination["upstream_speed_kbps"],
+                    port_speed=_circuit_termination["port_speed_kbps"],
+                )
+            if _circuit_termination["termination_type"] == "Location":
+                _location = Location.objects.get(name=_circuit_termination["location"])
+                circuit_termination = CircuitTermination.objects.create(
+                    location=_location,
+                    circuit=_circuit,
+                    term_side=_term_side,
+                    xconnect_id=_circuit_termination["cross_connect_id"],
+                    pp_info=_circuit_termination["patch_panel_or_ports"],
+                    description=_circuit_termination["description"],
+                    upstream_speed=_circuit_termination["upstream_speed_kbps"],
+                    port_speed=_circuit_termination["port_speed_kbps"],
+                )
+            circuit_termination.custom_field_data["system_of_record"] = "Bootstrap"
+            circuit_termination.validated_save()
+            if _circuit_termination["tags"]:
+                for _tag in _circuit_termination["tags"]:
+                    
circuit_termination.tags.add(Tag.objects.get(name=_tag)) + + def _setup_namespaces(self): + for _namespace in GLOBAL_YAML_SETTINGS["namespace"]: + _location = None + if _namespace["location"]: + _location = Location.objects.get(name=_namespace["location"]) + namespace, _ = Namespace.objects.get_or_create( + name=_namespace["name"], + location=_location, + ) + namespace.description = _namespace["description"] + namespace.custom_field_data["system_of_record"] = "Bootstrap" + namespace.validated_save() + + def _setup_rirs(self): + for _rir in GLOBAL_YAML_SETTINGS["rir"]: + rir, _ = RIR.objects.get_or_create( + name=_rir["name"], + ) + rir.is_private = _rir["private"] + rir.description = _rir["description"] + rir.custom_field_data["system_of_record"] = "Bootstrap" + rir.validated_save() + + def _setup_vlan_groups(self): + for _vlan_group in GLOBAL_YAML_SETTINGS["vlan_group"]: + _location = None + if _vlan_group["location"]: + _location = Location.objects.get(name=_vlan_group["location"]) + vlan_group, _ = VLANGroup.objects.get_or_create(name=_vlan_group["name"], location=_location) + vlan_group.description = _vlan_group["description"] + vlan_group.custom_field_data["system_of_record"] = "Bootstrap" + vlan_group.validated_save() + + def _setup_vlans(self): + for _vlan in GLOBAL_YAML_SETTINGS["vlan"]: + _role = None + _locations = [] + _vlan_group = None + _tenant = None + _tags = [] + _status = self.status_active + if _vlan["status"] and _vlan["status"] != "Active": + _status = Status.objects.get(name=_vlan["status"]) + if _vlan["role"]: + _role = Role.objects.get(name=_vlan["role"]) + if _vlan["locations"]: + for _l in _vlan["locations"]: + _locations.append(Location.objects.get(name=_l)) + if _vlan["vlan_group"]: + _vlan_group = VLANGroup.objects.get(name=_vlan["vlan_group"]) + if _vlan["tenant"]: + _tenant = Tenant.objects.get(name=_vlan["tenant"]) + if _vlan["tags"]: + for _t in _vlan["tags"]: + _tags.append(Tag.objects.get(name=_t)) + vlan, _ = VLAN.objects.get_or_create( + vid=_vlan["vid"], + name=_vlan["name"], + vlan_group=_vlan_group, + status=_status, + ) + vlan.role = _role + vlan.locations.set(_locations) + vlan.tenant = _tenant + vlan.description = _vlan["description"] + vlan.custom_field_data["system_of_record"] = "Bootstrap" + vlan.validated_save() + vlan.tags.set(_tags) + + def _setup_vrfs(self): + for _vrf in GLOBAL_YAML_SETTINGS["vrf"]: + _namespace = None + _tenant = None + _tags = [] + if _vrf["namespace"]: + _namespace = Namespace.objects.get(name=_vrf["namespace"]) + if _vrf["tenant"]: + _tenant = Tenant.objects.get(name=_vrf["tenant"]) + if _vrf["tags"]: + for _t in _vrf["tags"]: + _tags.append(Tag.objects.get(name=_t)) + vrf, _ = VRF.objects.get_or_create( + name=_vrf["name"], + namespace=_namespace, + ) + vrf.rd = _vrf["route_distinguisher"] + vrf.description = _vrf["description"] + vrf.tenant = _tenant + vrf.custom_field_data["system_of_record"] = "Bootstrap" + vrf.tags.set(_tags) + vrf.validated_save() + + def _setup_prefixes(self): + for prefix_data in GLOBAL_YAML_SETTINGS["prefix"]: + namespace = self._get_namespace(prefix_data) + prefix_type = self._get_prefix_type(prefix_data) + role = self._get_role(prefix_data) + rir = self._get_rir(prefix_data) + vrfs = self._get_vrfs(prefix_data) + locations = self._get_locations(prefix_data) + vlan = self._get_vlan(prefix_data) + tenant = self._get_tenant(prefix_data) + tags = self._get_prefix_tags(prefix_data) + + prefix_info = PrefixInfo(namespace, prefix_type, role, rir, vlan, tenant) + prefix = 
self._get_or_create_prefix(prefix_data, prefix_info)
+            self._update_prefix(prefix, locations, vrfs, tags)
+
+    def _get_namespace(self, prefix_data):
+        if prefix_data["namespace"] and prefix_data["namespace"] != "Global":
+            return Namespace.objects.get(name=prefix_data["namespace"])
+        return Namespace.objects.get(name="Global")
+
+    def _get_prefix_type(self, prefix_data):
+        if prefix_data["prefix_type"] and prefix_data["prefix_type"] != "network":
+            return prefix_data["prefix_type"]
+        return "network"
+
+    def _get_role(self, prefix_data):
+        if prefix_data["role"]:
+            return Role.objects.get(name=prefix_data["role"])
+        return None
+
+    def _get_rir(self, prefix_data):
+        if prefix_data["rir"]:
+            return RIR.objects.get(name=prefix_data["rir"])
+        return None
+
+    def _get_vrfs(self, prefix_data):
+        vrfs = []
+        if prefix_data["vrfs"]:
+            for vrf in prefix_data["vrfs"]:
+                namespace = Namespace.objects.get(name=vrf.split("__")[1])
+                vrfs.append(VRF.objects.get(name=vrf.split("__")[0], namespace=namespace))
+        return vrfs
+
+    def _get_locations(self, prefix_data):
+        locations = []
+        if prefix_data["locations"]:
+            for loc in prefix_data["locations"]:
+                locations.append(Location.objects.get(name=loc))
+        return locations
+
+    def _get_vlan(self, prefix_data):
+        if prefix_data["vlan"]:
+            name, vid, group = prefix_data["vlan"].split("__", 2)
+            vlan_group = VLANGroup.objects.get(name=group) if group else None
+            return VLAN.objects.get(name=name, vid=vid, vlan_group=vlan_group)
+        return None
+
+    def _get_tenant(self, prefix_data):
+        if prefix_data["tenant"]:
+            return Tenant.objects.get(name=prefix_data["tenant"])
+        return None
+
+    def _get_prefix_tags(self, prefix_data):
+        tags = []
+        if prefix_data["tags"]:
+            for tag in prefix_data["tags"]:
+                tags.append(Tag.objects.get(name=tag))
+        return tags
+
+    def _get_or_create_prefix(self, prefix_data, prefix_info):
+        try:
+            return Prefix.objects.get(
+                network=prefix_data["network"].split("/")[0],
+                prefix_length=prefix_data["network"].split("/")[1],
+                namespace=prefix_info.namespace,
+                type=prefix_info.prefix_type,
+            )
+        except Prefix.DoesNotExist:
+            return Prefix.objects.create(
+                network=prefix_data["network"].split("/")[0],
+                prefix_length=prefix_data["network"].split("/")[1],
+                namespace=prefix_info.namespace,
+                type=prefix_info.prefix_type,
+                status=Status.objects.get(name=prefix_data["status"]),
+                role=prefix_info.role,
+                rir=prefix_info.rir,
+                date_allocated=prefix_data["date_allocated"],
+                description=prefix_data["description"],
+                vlan=prefix_info.vlan,
+                tenant=prefix_info.tenant,
+            )
+
+    def _update_prefix(self, prefix, locations, vrfs, tags):
+        prefix.custom_field_data["system_of_record"] = "Bootstrap"
+        prefix.validated_save()
+        for loc in locations:
+            prefix.locations.add(loc)
+        for vrf in vrfs:
+            prefix.vrfs.add(vrf)
+        for tag in tags:
+            prefix.tags.add(tag)
+        prefix.validated_save()
+
+    def _setup_manufacturers(self):
+        for _manufacturer in GLOBAL_YAML_SETTINGS["manufacturer"]:
+            _manufac = Manufacturer.objects.create(name=_manufacturer["name"], description=_manufacturer["description"])
+            _manufac.custom_field_data["system_of_record"] = "Bootstrap"
+            _manufac.validated_save()
+
+    def _setup_platforms(self):
+        for _platform in GLOBAL_YAML_SETTINGS["platform"]:
+            _manufac = Manufacturer.objects.get(name=_platform["manufacturer"])
+            _platf = Platform.objects.create(
+                name=_platform["name"],
+                manufacturer=_manufac,
+                description=_platform["description"],
+                network_driver=_platform["network_driver"],
+                napalm_args=_platform["napalm_arguments"],
+                napalm_driver=_platform["napalm_driver"],
+            )
+            _platf.custom_field_data["system_of_record"] = "Bootstrap"
+            _platf.validated_save()
+
+    def _setup_device_types_and_devices(self):
+        _device_types = [
+            {"model": "WS3850-24P", "manufacturer": "Cisco"},
+            {"model": "PA-820", "manufacturer": "Palo Alto Networks"},
+        ]
+        _devices = [
+            {
+                "name": "Switch1",
+                "manufacturer": "Cisco",
+                "platform": "cisco_ios",
+                "location": "Atlanta",
+                "device_type": "WS3850-24P",
+                "role": "Switch",
+            },
+            {
+                "name": "Firewall1",
+                "manufacturer": "Palo Alto Networks",
+                "platform": "paloalto_panos",
+                "location": "Atlanta",
+                "device_type": "PA-820",
+                "role": "Firewall",
+            },
+        ]
+
+        for _dev_type in _device_types:
+            _manufacturer = Manufacturer.objects.get(name=_dev_type["manufacturer"])
+            _dev_type = DeviceType.objects.create(model=_dev_type["model"], manufacturer=_manufacturer)
+            _dev_type.custom_field_data["system_of_record"] = "Bootstrap"
+            _dev_type.validated_save()
+
+        for _dev in _devices:
+            _manufacturer = Manufacturer.objects.get(name=_dev["manufacturer"])
+            _platform = Platform.objects.get(name=_dev["platform"])
+            _dev_type = DeviceType.objects.get(model=_dev["device_type"])
+            _role = Role.objects.get(name=_dev["role"])
+            _site = Location.objects.get(name=_dev["location"])
+            _device = Device.objects.create(
+                name=_dev["name"],
+                platform=_platform,
+                device_type=_dev_type,
+                status=self.status_active,
+                role=_role,
+                location=_site,
+            )
+            _device.custom_field_data["system_of_record"] = "Bootstrap"
+            _device.save()
+            _device.refresh_from_db()
+
+    def _setup_inventory_items(self):
+        _inventory_items = [{"name": "sfp-module", "device": "Switch1", "manufacturer": "Cisco"}]
+        for _inv_item in _inventory_items:
+            _dev = Device.objects.get(name=_inv_item["device"])
+            _manufacturer = Manufacturer.objects.get(name=_inv_item["manufacturer"])
+            _inventory_item = InventoryItem.objects.create(
+                name=_inv_item["name"], device=_dev, manufacturer=_manufacturer
+            )
+            _inventory_item.custom_field_data["system_of_record"] = "Bootstrap"
+            _inventory_item.save()
+            _inventory_item.refresh_from_db()
+
+    def _setup_secrets_and_groups(self):
+        for _sec in GLOBAL_YAML_SETTINGS["secret"]:
+            _secret = Secret.objects.create(
+                name=_sec["name"],
+                provider=_sec["provider"],
+                parameters=_sec["parameters"],
+            )
+            _secret.custom_field_data["system_of_record"] = "Bootstrap"
+            _secret.save()
+            _secret.refresh_from_db()
+
+        for _sec_group in GLOBAL_YAML_SETTINGS["secrets_group"]:
+            _secrets_group = SecretsGroup.objects.create(name=_sec_group["name"])
+            _secrets_group.custom_field_data["system_of_record"] = "Bootstrap"
+            _secrets_group.validated_save()
+            _secrets_group.refresh_from_db()
+            for _sec in _sec_group["secrets"]:
+                _sga = SecretsGroupAssociation.objects.create(
+                    secrets_group=_secrets_group,
+                    secret=Secret.objects.get(name=_sec["name"]),
+                    access_type=_sec["access_type"],
+                    secret_type=_sec["secret_type"],
+                )
+                _sga.validated_save()
+                _sga.refresh_from_db()
+
+    def _setup_computed_fields(self):
+        for _comp_field in GLOBAL_YAML_SETTINGS["computed_field"]:
+            _content_type = ContentType.objects.get(
+                app_label=_comp_field["content_type"].split(".")[0],
+                model=_comp_field["content_type"].split(".")[1],
+            )
+            _computed_field = ComputedField.objects.create(
+                label=_comp_field["label"],
+                content_type=_content_type,
+                template=_comp_field["template"],
+            )
+            _computed_field.save()
+            _computed_field.refresh_from_db()
+
+    def _setup_graphql_queries(self):
+        for _gql_query in GLOBAL_YAML_SETTINGS["graph_ql_query"]:
+            _qglq = GraphQLQuery.objects.create(name=_gql_query["name"], query=_gql_query["query"])
+            _qglq.save()
+            _qglq.refresh_from_db()
+
+    def _setup_git_repositories(self):
+        for _repo in GLOBAL_YAML_SETTINGS["git_repository"]:
+            if _repo.get("branch"):
+                _git_branch = _repo["branch"]
+            else:
+                _git_branch = DEVELOP_YAML_SETTINGS["git_branch"]
+            _secrets_group = None
+            if _repo.get("secrets_group_name"):
+                _secrets_group = SecretsGroup.objects.get(name=_repo["secrets_group_name"])
+            _git_repo = GitRepository.objects.create(
+                name=_repo["name"],
+                slug=slugify(_repo["name"]),
+                remote_url=_repo["url"],
+                branch=_git_branch,
+                secrets_group=_secrets_group,
+                provided_contents=_repo["provided_data_type"],
+            )
+            _git_repo.custom_field_data["system_of_record"] = "Bootstrap"
+            _git_repo.validated_save()
+
+    def _setup_dynamic_groups(self):
+        for _group in GLOBAL_YAML_SETTINGS["dynamic_group"]:
+            _content_type = ContentType.objects.get(
+                app_label=_group["content_type"].split(".")[0],
+                model=_group["content_type"].split(".")[1],
+            )
+            _dynamic_group = DynamicGroup.objects.create(
+                name=_group["name"],
+                content_type=_content_type,
+                filter=json.loads(_group["filter"]),
+                description=_group["description"],
+            )
+            _dynamic_group.custom_field_data["system_of_record"] = "Bootstrap"
+            _dynamic_group.validated_save()
+            _dynamic_group.refresh_from_db()
+
+    def _setup_software_and_images(self):
+        for _software in GLOBAL_YAML_SETTINGS["software"]:
+            _tags = []
+            for _tag in _software["tags"]:
+                _tags.append(Tag.objects.get(name=_tag))
+            _platform = Platform.objects.get(name=_software["device_platform"])
+            _soft = SoftwareLCM.objects.create(
+                version=_software["version"],
+                alias=_software["alias"],
+                device_platform=_platform,
+                end_of_support=_software["eos_date"],
+                long_term_support=_software["lts"],
+                pre_release=_software["pre_release"],
+                documentation_url=_software["documentation_url"],
+                tags=_tags,
+            )
+            _soft.custom_field_data["system_of_record"] = "Bootstrap"
+            _soft.validated_save()
+            _soft.refresh_from_db()
+
+        for _software_image in GLOBAL_YAML_SETTINGS["software_image"]:
+            _tags = []
+            for _tag in _software_image["tags"]:
+                _tags.append(Tag.objects.get(name=_tag))
+            _platform = Platform.objects.get(name=_software_image["platform"])
+            _software = SoftwareLCM.objects.get(version=_software_image["software_version"], device_platform=_platform)
+            _soft_image = SoftwareImageLCM.objects.create(
+                software=_software,
+                image_file_name=_software_image["file_name"],
+                image_file_checksum=_software_image["image_file_checksum"],
+                hashing_algorithm=_software_image["hashing_algorithm"],
+                download_url=_software_image["download_url"],
+                default_image=_software_image["default_image"],
+                tags=_tags,
+            )
+            _soft_image.custom_field_data["system_of_record"] = "Bootstrap"
+            _soft_image.validated_save()
+            _soft_image.refresh_from_db()
+
+    def _setup_validated_software(self):
+        for validated_software_data in GLOBAL_YAML_SETTINGS["validated_software"]:
+            tags = self._get_validated_software_tags(validated_software_data["tags"])
+            devices = self._get_devices(validated_software_data["devices"])
+            device_types = self._get_device_types(validated_software_data["device_types"])
+            device_roles = self._get_device_roles(validated_software_data["device_roles"])
+            inventory_items = self._get_inventory_items(validated_software_data["inventory_items"])
+            object_tags = self._get_object_tags(validated_software_data["object_tags"])
+
+            software = self._get_software(validated_software_data["software"])
+
+            validated_software = ValidatedSoftwareLCM.objects.create(
+                software=software,
+                start=validated_software_data["valid_since"],
+                end=validated_software_data["valid_until"],
+                preferred=validated_software_data["preferred_version"],
+                tags=tags,
+            )
+            validated_software.custom_field_data["system_of_record"] = "Bootstrap"
+            validated_software.validated_save()
+
+            self._set_validated_software_relations(
+                validated_software,
+                devices,
+                device_types,
+                device_roles,
+                inventory_items,
+                object_tags,
+            )
+
+    def _get_validated_software_tags(self, tag_names):
+        return [Tag.objects.get(name=tag_name) for tag_name in tag_names]
+
+    def _get_devices(self, device_names):
+        return [Device.objects.get(name=device_name) for device_name in device_names]
+
+    def _get_device_types(self, device_type_names):
+        return [DeviceType.objects.get(model=device_type_name) for device_type_name in device_type_names]
+
+    def _get_device_roles(self, device_role_names):
+        return [Role.objects.get(name=device_role_name) for device_role_name in device_role_names]
+
+    def _get_inventory_items(self, inventory_item_names):
+        return [InventoryItem.objects.get(name=inventory_item_name) for inventory_item_name in inventory_item_names]
+
+    def _get_object_tags(self, object_tag_names):
+        return [Tag.objects.get(name=object_tag_name) for object_tag_name in object_tag_names]
+
+    def _get_software(self, software_name):
+        _, software_version = software_name.split(" - ")
+        platform = Platform.objects.get(name=_)
+        software = SoftwareLCM.objects.get(version=software_version, device_platform=platform)
+        return software
+
+    def _set_validated_software_relations(
+        self,
+        validated_software,
+        devices,
+        device_types,
+        device_roles,
+        inventory_items,
+        object_tags,
+    ):  # pylint: disable=too-many-arguments
+        validated_software.devices.set(devices)
+        validated_software.device_types.set(device_types)
+
+        _ = device_roles
+        _ = inventory_items
+        _ = object_tags
diff --git a/nautobot_ssot/tests/device42/unit/test_device42_adapter.py b/nautobot_ssot/tests/device42/unit/test_device42_adapter.py
index 4f6129708..441f3c837 100644
--- a/nautobot_ssot/tests/device42/unit/test_device42_adapter.py
+++ b/nautobot_ssot/tests/device42/unit/test_device42_adapter.py
@@ -5,6 +5,7 @@
 from diffsync.exceptions import ObjectAlreadyExists, ObjectNotFound
 from nautobot.core.testing import TransactionTestCase
+from nautobot.dcim.models import LocationType
 from nautobot.extras.models import JobResult
 from parameterized import parameterized
@@ -75,6 +76,7 @@ def setUp(self):
         self.d42_client.get_ipaddr_default_custom_fields.return_value = {}
 
         self.job = self.job_class()
+        self.job.building_loctype = LocationType.objects.get_or_create(name="Site")[0]
         self.job.logger = MagicMock()
         self.job.logger.info = MagicMock()
         self.job.logger.warning = MagicMock()
@@ -105,12 +107,12 @@ def test_data_loading(self):
         """Test the load() function."""
         self.device42.load_buildings()
         self.assertEqual(
-            {site["name"] for site in BUILDING_FIXTURE},
+            {f"{site['name']}__{self.job.building_loctype.name}" for site in BUILDING_FIXTURE},
             {site.get_unique_id() for site in self.device42.get_all("building")},
         )
         self.device42.load_rooms()
         self.assertEqual(
-            {f"{room['name']}__{room['building']}" for room in ROOM_FIXTURE},
+            {f"{room['name']}__{room['building']}__{self.job.building_loctype.name}" for room in ROOM_FIXTURE},
             {room.get_unique_id() for room in self.device42.get_all("room")},
         )
         self.device42.load_racks()
@@ -157,7 +159,7 @@ def test_load_buildings_duplicate_site(self):
         self.device42.load_buildings()
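#         A second load_buildings() pass feeds the adapter duplicate data on
#         purpose: the add is expected to raise ObjectAlreadyExists internally,
#         and the assertion below checks the warning logged for it. Building
#         unique IDs now embed the building LocationType (f"{name}__{loctype}"),
#         hence "Microsoft HQ__Site" in the expected message.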
self.device42.load_buildings() self.job.logger.warning.assert_called_with( - "Microsoft HQ is already loaded. ('Object Microsoft HQ already present', building \"Microsoft HQ\")" + "Microsoft HQ is already loaded. ('Object Microsoft HQ__Site already present', building \"Microsoft HQ__Site\")" ) def test_load_rooms_duplicate_room(self): @@ -166,7 +168,7 @@ def test_load_rooms_duplicate_room(self): self.device42.load_rooms() self.device42.load_rooms() self.job.logger.warning.assert_called_with( - "Secondary IDF is already loaded. ('Object Secondary IDF__Microsoft HQ already present', room \"Secondary IDF__Microsoft HQ\")" + "Secondary IDF is already loaded. ('Object Secondary IDF__Microsoft HQ__Site already present', room \"Secondary IDF__Microsoft HQ__Site\")" ) def test_load_rooms_missing_building(self): diff --git a/nautobot_ssot/tests/device42/unit/test_utils_device42.py b/nautobot_ssot/tests/device42/unit/test_utils_device42.py index 80c8f3da1..08def18a2 100644 --- a/nautobot_ssot/tests/device42/unit/test_utils_device42.py +++ b/nautobot_ssot/tests/device42/unit/test_utils_device42.py @@ -7,6 +7,7 @@ from nautobot.core.testing import TestCase from parameterized import parameterized +from nautobot_ssot.exceptions import MissingConfigSetting from nautobot_ssot.integrations.device42.jobs import Device42DataSource from nautobot_ssot.integrations.device42.utils import device42 @@ -23,7 +24,7 @@ class TestMissingConfigSetting(TestCase): def setUp(self): """Setup MissingConfigSetting instance.""" self.setting = "D42_URL" - self.missing_setting = device42.MissingConfigSetting(setting=self.setting) + self.missing_setting = MissingConfigSetting(setting=self.setting) def test_missingconfigsetting(self): self.assertTrue(self.missing_setting.setting == "D42_URL") diff --git a/nautobot_ssot/tests/dna_center/test_adapters_dna_center.py b/nautobot_ssot/tests/dna_center/test_adapters_dna_center.py index ee3e6cfaf..36442cfa8 100644 --- a/nautobot_ssot/tests/dna_center/test_adapters_dna_center.py +++ b/nautobot_ssot/tests/dna_center/test_adapters_dna_center.py @@ -81,6 +81,9 @@ def setUp(self): ) self.hq_site.validated_save() + self.floor_loc_type = LocationType.objects.get_or_create(name="Floor", parent=self.site_loc_type)[0] + self.floor_loc_type.content_types.add(ContentType.objects.get_for_model(Device)) + cisco_manu = Manufacturer.objects.get_or_create(name="Cisco")[0] catalyst_devicetype = DeviceType.objects.get_or_create(model="WS-C3850-24P-L", manufacturer=cisco_manu)[0] core_role, created = Role.objects.get_or_create(name="CORE") @@ -120,6 +123,9 @@ def setUp(self): dnac = Controller.objects.get_or_create(name="DNA Center", status=self.status_active, location=self.hq_site)[0] self.job = DnaCenterDataSource() + self.job.area_loctype = self.reg_loc_type + self.job.building_loctype = self.site_loc_type + self.job.floor_loctype = self.floor_loc_type self.job.dnac = dnac self.job.controller_group = ControllerManagedDeviceGroup.objects.get_or_create( name="DNA Center Managed Devices", controller=dnac @@ -187,7 +193,7 @@ def test_load_areas_w_global(self): area_actual = sorted([area.get_unique_id() for area in self.dna_center.get_all("area")]) self.assertEqual(area_actual, area_expected) self.dna_center.job.logger.info.assert_called_with( - "Loaded area Sydney. 
{'parentId': '262696b1-aa87-432b-8a21-db9a77c51f23', 'additionalInfo': [{'nameSpace': 'Location', 'attributes': {'addressInheritedFrom': '262696b1-aa87-432b-8a21-db9a77c51f23', 'type': 'area'}}], 'name': 'Sydney', 'instanceTenantId': '623f029857259506a56ad9bd', 'id': '6e404051-4c06-4dab-adaa-72c5eeac577b', 'siteHierarchy': '9e5f9fc2-032e-45e8-994c-4a00629648e8/262696b1-aa87-432b-8a21-db9a77c51f23/6e404051-4c06-4dab-adaa-72c5eeac577b', 'siteNameHierarchy': 'Global/Australia/Sydney'}" + "Loaded Region Sydney. {'parentId': '262696b1-aa87-432b-8a21-db9a77c51f23', 'additionalInfo': [{'nameSpace': 'Location', 'attributes': {'addressInheritedFrom': '262696b1-aa87-432b-8a21-db9a77c51f23', 'type': 'area'}}], 'name': 'Sydney', 'instanceTenantId': '623f029857259506a56ad9bd', 'id': '6e404051-4c06-4dab-adaa-72c5eeac577b', 'siteHierarchy': '9e5f9fc2-032e-45e8-994c-4a00629648e8/262696b1-aa87-432b-8a21-db9a77c51f23/6e404051-4c06-4dab-adaa-72c5eeac577b', 'siteNameHierarchy': 'Global/Australia/Sydney'}" ) @override_settings(PLUGINS_CONFIG={"nautobot_ssot": {"dna_center_import_global": False}}) @@ -216,7 +222,9 @@ def test_load_buildings_w_global(self): ("", "building"), ] self.dna_center.load_buildings(buildings=EXPECTED_BUILDINGS) - building_expected = [x["name"] for x in EXPECTED_DNAC_LOCATION_MAP.values() if x["loc_type"] == "building"] + building_expected = [ + f"{x['name']}__{x['parent']}" for x in EXPECTED_DNAC_LOCATION_MAP.values() if x["loc_type"] == "building" + ] building_actual = [building.get_unique_id() for building in self.dna_center.get_all("building")] self.assertEqual(building_actual, building_expected) @@ -235,7 +243,9 @@ def test_load_buildings_wo_global(self): self.dna_center.dnac_location_map = EXPECTED_DNAC_LOCATION_MAP_WO_GLOBAL self.dna_center.load_buildings(buildings=EXPECTED_BUILDINGS) building_expected = [ - x["name"] for x in EXPECTED_DNAC_LOCATION_MAP_WO_GLOBAL.values() if x["loc_type"] == "building" + f"{x['name']}__{x['parent']}" + for x in EXPECTED_DNAC_LOCATION_MAP_WO_GLOBAL.values() + if x["loc_type"] == "building" ] building_actual = [building.get_unique_id() for building in self.dna_center.get_all("building")] self.assertEqual(sorted(building_actual), sorted(building_expected)) @@ -254,7 +264,7 @@ def test_load_buildings_duplicate(self): ] self.dna_center.load_buildings(buildings=EXPECTED_BUILDINGS) self.dna_center.load_buildings(buildings=[EXPECTED_BUILDINGS[0]]) - self.dna_center.job.logger.warning.assert_called_with("Building Building1 already loaded so skipping.") + self.dna_center.job.logger.warning.assert_called_with("Site Building1 already loaded so skipping.") def test_load_buildings_with_validation_error(self): """Test Nautobot SSoT for Cisco DNA Center load_buildings() function with a ValidationError.""" diff --git a/nautobot_ssot/tests/dna_center/test_adapters_nautobot.py b/nautobot_ssot/tests/dna_center/test_adapters_nautobot.py index 51a1eda5a..596939f56 100644 --- a/nautobot_ssot/tests/dna_center/test_adapters_nautobot.py +++ b/nautobot_ssot/tests/dna_center/test_adapters_nautobot.py @@ -48,10 +48,12 @@ def setUp(self): # pylint: disable=too-many-locals job.job_result = JobResult.objects.create( name=job.class_path, task_name="fake task", user=None, id=uuid.uuid4() ) + job.logger.info = MagicMock() + job.logger.warning = MagicMock() + job.area_loctype = self.reg_loc_type + job.building_loctype = self.site_loc_type + job.floor_loctype = self.floor_loc_type self.nb_adapter = NautobotAdapter(job=job, sync=None) - self.nb_adapter.job = MagicMock() - 
self.nb_adapter.job.logger.info = MagicMock() - self.nb_adapter.job.logger.warning = MagicMock() def build_nautobot_objects(self): # pylint: disable=too-many-locals, too-many-statements """Build out Nautobot objects to test loading.""" @@ -259,7 +261,7 @@ def test_data_loading(self): sorted(loc.get_unique_id() for loc in self.nb_adapter.get_all("area")), ) self.assertEqual( - ["HQ"], + ["HQ__NY"], sorted(site.get_unique_id() for site in self.nb_adapter.get_all("building")), ) self.assertEqual( @@ -289,11 +291,11 @@ def test_data_loading(self): sorted(ipaddr.get_unique_id() for ipaddr in self.nb_adapter.get_all("ipaddress")), ) - def test_load_regions_failure(self): - """Test the load_regions method failing with loading duplicate Regions.""" + def test_load_areas_failure(self): + """Test the load_areas method failing with loading duplicate Areas.""" self.build_nautobot_objects() self.nb_adapter.load() - self.nb_adapter.load_regions() + self.nb_adapter.load_areas() self.nb_adapter.job.logger.warning.assert_called_with("Region NY already loaded so skipping duplicate.") @patch("nautobot_ssot.integrations.dna_center.diffsync.adapters.nautobot.OrmLocationType") @@ -311,9 +313,7 @@ def test_load_floors_missing_building(self, mock_floors, mock_loc_type): self.nb_adapter.get = MagicMock() self.nb_adapter.get.side_effect = [ObjectNotFound()] self.nb_adapter.load_floors() - self.nb_adapter.job.logger.warning.assert_called_with( - "Unable to load building Missing for floor HQ - Floor 1. " - ) + self.nb_adapter.job.logger.warning.assert_called_with("Unable to load Site Missing for Floor HQ - Floor 1. ") def test_sync_complete(self): """Test the sync_complete() method in the NautobotAdapter.""" diff --git a/nautobot_ssot/tests/dna_center/test_models_nautobot.py b/nautobot_ssot/tests/dna_center/test_models_nautobot.py index 12d5afa6a..39f7c30a8 100644 --- a/nautobot_ssot/tests/dna_center/test_models_nautobot.py +++ b/nautobot_ssot/tests/dna_center/test_models_nautobot.py @@ -30,11 +30,12 @@ class TestNautobotArea(TransactionTestCase): def setUp(self): super().setUp() + self.region_type = LocationType.objects.get_or_create(name="Region", nestable=True)[0] self.adapter = Adapter() self.adapter.job = MagicMock() + self.adapter.job.area_loctype = self.region_type self.adapter.job.logger.info = MagicMock() self.adapter.region_map = {} - self.region_type = LocationType.objects.get_or_create(name="Region", nestable=True)[0] self.adapter.locationtype_map = {"Region": self.region_type.id} self.adapter.status_map = {"Active": Status.objects.get(name="Active").id} @@ -72,17 +73,21 @@ class TestNautobotBuilding(TransactionTestCase): def setUp(self): super().setUp() + self.reg_loc = LocationType.objects.get_or_create(name="Region", nestable=True)[0] + loc_type = LocationType.objects.get_or_create(name="Site", parent=self.reg_loc)[0] self.adapter = Adapter() self.adapter.job = MagicMock() self.adapter.job.debug = True + self.adapter.job.area_loctype = self.reg_loc + self.adapter.job.building_loctype = loc_type self.adapter.job.logger.info = MagicMock() self.adapter.status_map = {"Active": Status.objects.get(name="Active").id} ga_tenant = Tenant.objects.create(name="G&A") self.adapter.tenant_map = {"G&A": ga_tenant.id} - reg_loc = LocationType.objects.get_or_create(name="Region", nestable=True)[0] - ny_region = Location.objects.create(name="NY", location_type=reg_loc, status=Status.objects.get(name="Active")) - loc_type = LocationType.objects.get_or_create(name="Site", parent=reg_loc)[0] - self.adapter.locationtype_map 
= {"Region": reg_loc.id, "Site": loc_type.id} + ny_region = Location.objects.create( + name="NY", location_type=self.reg_loc, status=Status.objects.get(name="Active") + ) + self.adapter.locationtype_map = {"Region": self.reg_loc.id, "Site": loc_type.id} self.sec_site = Location.objects.create( name="Site 2", parent=ny_region, status=Status.objects.get(name="Active"), location_type=loc_type ) @@ -102,10 +107,9 @@ def setUp(self): def test_create(self): """Validate the NautobotBuilding create() method creates a Site.""" - ids = {"name": "HQ"} + ids = {"name": "HQ", "area": "NY"} attrs = { "address": "123 Main St", - "area": "NY", "area_parent": None, "latitude": "12.345", "longitude": "-67.890", @@ -121,7 +125,7 @@ def test_create(self): self.adapter.job.logger.info.assert_called_once_with("Creating Site HQ.") site_obj = Location.objects.get(name=ids["name"], location_type__name="Site") self.assertEqual(site_obj.name, ids["name"]) - self.assertEqual(site_obj.parent.name, attrs["area"]) + self.assertEqual(site_obj.parent.name, ids["area"]) self.assertEqual(site_obj.physical_address, attrs["address"]) self.assertEqual(site_obj.tenant.name, attrs["tenant"]) @@ -157,15 +161,19 @@ def test_update_wo_tenant(self): def test_delete(self): """Validate the NautobotBuilding delete() method deletes a Site.""" ds_mock_site = MagicMock(spec=Location) + ds_mock_site.location_type = "Site" ds_mock_site.uuid = "1234567890" ds_mock_site.adapter = MagicMock() + ds_mock_site.adapter.job.building_loctype = self.adapter.job.building_loctype ds_mock_site.adapter.job.logger.info = MagicMock() mock_site = MagicMock(spec=Location) mock_site.name = "Test" site_get_mock = MagicMock(return_value=mock_site) with patch.object(Location.objects, "get", site_get_mock): result = NautobotBuilding.delete(ds_mock_site) - ds_mock_site.adapter.job.logger.info.assert_called_once_with("Deleting Site Test.") + ds_mock_site.adapter.job.logger.info.assert_called_once_with( + f"Deleting {self.adapter.job.building_loctype.name} Test." 
+ ) self.assertEqual(ds_mock_site, result) @@ -177,13 +185,16 @@ class TestNautobotFloor(TransactionTestCase): def setUp(self): super().setUp() + site_loc_type = LocationType.objects.get_or_create(name="Site")[0] + self.floor_loc_type = LocationType.objects.get_or_create(name="Floor", parent=site_loc_type)[0] self.adapter = Adapter() self.adapter.job = MagicMock() + self.adapter.job.building_loctype = site_loc_type + self.adapter.job.floor_loctype = self.floor_loc_type self.adapter.job.logger.info = MagicMock() ga_tenant = Tenant.objects.create(name="G&A") self.adapter.tenant_map = {"G&A": ga_tenant.id} - site_loc_type = LocationType.objects.get_or_create(name="Site")[0] - self.floor_loc_type = LocationType.objects.get_or_create(name="Floor", parent=site_loc_type)[0] + self.adapter.locationtype_map = {"Site": site_loc_type.id, "Floor": self.floor_loc_type.id} self.hq_site, _ = Location.objects.get_or_create( name="HQ", location_type=site_loc_type, status=Status.objects.get(name="Active") @@ -191,6 +202,7 @@ def setUp(self): self.adapter.site_map = {"HQ": self.hq_site.id} self.adapter.floor_map = {} self.adapter.status_map = {"Active": Status.objects.get(name="Active").id} + self.adapter.objects_to_delete = {"floors": []} def test_create(self): """Test the NautobotFloor create() method creates a LocationType: Floor.""" @@ -206,10 +218,9 @@ def test_create(self): def test_update_w_tenant(self): """Test the NautobotFloor update() method updates a LocationType: Floor with tenant.""" - floor_type = LocationType.objects.get(name="Floor") mock_floor = Location.objects.create( name="HQ - Floor 2", - location_type=floor_type, + location_type=self.floor_loc_type, parent=self.hq_site, status=Status.objects.get(name="Active"), ) @@ -236,8 +247,7 @@ def test_update_wo_tenant(self): ) mock_floor.validated_save() test_floor = NautobotFloor(name="HQ - Floor 2", building="HQ", tenant="", uuid=mock_floor.id) - test_floor.adapter = MagicMock() - test_floor.adapter.job.logger.info = MagicMock() + test_floor.adapter = self.adapter update_attrs = { "tenant": None, } @@ -249,16 +259,18 @@ def test_update_wo_tenant(self): def test_delete(self): """Validate the NautobotFloor delete() method deletes a LocationType: Floor.""" ds_mock_floor = MagicMock(spec=Location) + ds_mock_floor.location_type = "Floor" ds_mock_floor.uuid = "1234567890" - ds_mock_floor.adapter = MagicMock() - ds_mock_floor.adapter.job.logger.info = MagicMock() + ds_mock_floor.adapter = self.adapter mock_floor = MagicMock(spec=Location) mock_floor.name = "Test" mock_floor.parent.name = "HQ" floor_get_mock = MagicMock(return_value=mock_floor) with patch.object(Location.objects, "get", floor_get_mock): result = NautobotFloor.delete(ds_mock_floor) - ds_mock_floor.adapter.job.logger.info.assert_called_once_with("Deleting Floor Test in HQ.") + ds_mock_floor.adapter.job.logger.info.assert_called_once_with( + f"Deleting {self.adapter.job.floor_loctype.name} Test in HQ." 
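#             As with the building test above, the expected text tracks
#             job.floor_loctype, so the assertion follows whatever LocationType
#             the job is configured to use for floors.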
+ ) self.assertEqual(ds_mock_floor, result) diff --git a/nautobot_ssot/tests/meraki/__init__.py b/nautobot_ssot/tests/meraki/__init__.py new file mode 100644 index 000000000..aec6aabc0 --- /dev/null +++ b/nautobot_ssot/tests/meraki/__init__.py @@ -0,0 +1 @@ +"""Unit tests for Meraki SSoT app.""" diff --git a/nautobot_ssot/tests/meraki/fixtures/__init__.py b/nautobot_ssot/tests/meraki/fixtures/__init__.py new file mode 100644 index 000000000..c7221233b --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/__init__.py @@ -0,0 +1 @@ +"""Fixtures for unit tests.""" diff --git a/nautobot_ssot/tests/meraki/fixtures/fixtures.py b/nautobot_ssot/tests/meraki/fixtures/fixtures.py new file mode 100644 index 000000000..7dd17242d --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/fixtures.py @@ -0,0 +1,35 @@ +"""Collection of fixtures to be used for unit testing.""" + +import json + + +def load_json(path): + """Load a json file.""" + with open(path, encoding="utf-8") as file: + return json.loads(file.read()) + + +GET_ORG_NETWORKS_SENT_FIXTURE = load_json("./nautobot_ssot/tests/meraki/fixtures/get_org_networks_sent.json") +GET_ORG_NETWORKS_RECV_FIXTURE = load_json("./nautobot_ssot/tests/meraki/fixtures/get_org_networks_recv.json") +NETWORK_MAP_FIXTURE = load_json("./nautobot_ssot/tests/meraki//fixtures/network_map.json") +GET_ORG_DEVICES_FIXTURE = load_json("./nautobot_ssot/tests/meraki/fixtures/get_org_devices.json") +GET_ORG_DEVICE_STATUSES_SENT_FIXTURE = load_json( + "./nautobot_ssot/tests/meraki/fixtures/get_org_device_statuses_sent.json" +) +GET_ORG_DEVICE_STATUSES_RECV_FIXTURE = load_json( + "./nautobot_ssot/tests/meraki/fixtures/get_org_device_statuses_recv.json" +) +GET_MANAGEMENT_PORTS_SENT_FIXTURE = load_json("./nautobot_ssot/tests/meraki/fixtures/get_management_ports_sent.json") +GET_MANAGEMENT_PORTS_RECV_FIXTURE = load_json("./nautobot_ssot/tests/meraki/fixtures/get_management_ports_recv.json") +GET_ORG_SWITCHPORTS_SENT_FIXTURE = load_json("./nautobot_ssot/tests/meraki/fixtures/get_org_switchports_sent.json") +GET_ORG_SWITCHPORTS_RECV_FIXTURE = load_json("./nautobot_ssot/tests/meraki/fixtures/get_org_switchports_recv.json") +GET_ORG_UPLINK_STATUSES_SENT_FIXTURE = load_json( + "./nautobot_ssot/tests/meraki/fixtures/get_org_uplink_statuses_sent.json" +) +GET_ORG_UPLINK_STATUSES_RECV_FIXTURE = load_json( + "./nautobot_ssot/tests/meraki/fixtures/get_org_uplink_statuses_recv.json" +) +GET_SWITCHPORT_STATUSES = load_json("./nautobot_ssot/tests/meraki/fixtures/get_switchport_statuses.json") +GET_UPLINK_SETTINGS_SENT = load_json("./nautobot_ssot/tests/meraki/fixtures/get_uplink_settings_sent.json") +GET_UPLINK_SETTINGS_RECV = load_json("./nautobot_ssot/tests/meraki/fixtures/get_uplink_settings_recv.json") +GET_APPLIANCE_SWITCHPORTS_FIXTURE = load_json("./nautobot_ssot/tests/meraki/fixtures/get_appliance_switchports.json") diff --git a/nautobot_ssot/tests/meraki/fixtures/get_appliance_switchports.json b/nautobot_ssot/tests/meraki/fixtures/get_appliance_switchports.json new file mode 100644 index 000000000..8d24778c3 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_appliance_switchports.json @@ -0,0 +1,83 @@ +[ + { + "number": 3, + "enabled": true, + "type": "access", + "dropUntaggedTraffic": false, + "vlan": 10, + "accessPolicy": "open" + }, + { + "number": 4, + "enabled": true, + "type": "access", + "dropUntaggedTraffic": false, + "vlan": 20, + "accessPolicy": "open" + }, + { + "number": 5, + "enabled": true, + "type": "access", + "dropUntaggedTraffic": false, + "vlan": 30, + 
"accessPolicy": "open" + }, + { + "number": 6, + "enabled": true, + "type": "access", + "dropUntaggedTraffic": false, + "vlan": 1000, + "accessPolicy": "open" + }, + { + "number": 7, + "enabled": true, + "type": "access", + "dropUntaggedTraffic": false, + "vlan": 1000, + "accessPolicy": "open" + }, + { + "number": 8, + "enabled": true, + "type": "access", + "dropUntaggedTraffic": false, + "vlan": 1000, + "accessPolicy": "open" + }, + { + "number": 9, + "enabled": true, + "type": "access", + "dropUntaggedTraffic": false, + "vlan": 1000, + "accessPolicy": "open" + }, + { + "number": 10, + "enabled": true, + "type": "access", + "dropUntaggedTraffic": false, + "vlan": 1000, + "accessPolicy": "open" + }, + { + "number": 11, + "enabled": true, + "type": "access", + "dropUntaggedTraffic": false, + "vlan": 1000, + "accessPolicy": "open" + }, + { + "number": 12, + "enabled": true, + "type": "trunk", + "dropUntaggedTraffic": false, + "vlan": 1000, + "accessPolicy": "open", + "allowedVlans": "all" + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_management_ports_recv.json b/nautobot_ssot/tests/meraki/fixtures/get_management_ports_recv.json new file mode 100644 index 000000000..e30165c92 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_management_ports_recv.json @@ -0,0 +1,19 @@ +{ + "wan1": { + "wanEnabled": "not configured", + "usingStaticIp": true, + "staticIp": "10.1.15.10", + "staticSubnetMask": "255.255.255.0", + "staticGatewayIp": "10.1.15.1", + "staticDns": [ + "1.1.1.1", + "1.0.0.1" + ], + "vlan": null + }, + "wan2": { + "wanEnabled": "not configured", + "usingStaticIp": false, + "vlan": null + } +} \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_management_ports_sent.json b/nautobot_ssot/tests/meraki/fixtures/get_management_ports_sent.json new file mode 100644 index 000000000..6a6fa2bf1 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_management_ports_sent.json @@ -0,0 +1,24 @@ +{ + "wan1": { + "wanEnabled": "not configured", + "usingStaticIp": true, + "staticIp": "10.1.15.10", + "staticSubnetMask": "255.255.255.0", + "staticGatewayIp": "10.1.15.1", + "staticDns": [ + "1.1.1.1", + "1.0.0.1" + ], + "vlan": null + }, + "wan2": { + "wanEnabled": "not configured", + "usingStaticIp": false, + "vlan": null + }, + "ddnsHostnames": { + "activeDdnsHostname": "ioa-epaliuc-ivab-ycnx-scvautuhqs.vcjrawe-a.com", + "ddnsHostnameWan1": "mkg-qsuobwv-esae-pvtl-gikqpgktwj-1.hkykafr-u.com", + "ddnsHostnameWan2": "lij-ukwthgu-gskj-akup-avposljtgm-4.eymrwab-p.com" + } +} \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_org_device_statuses_recv.json b/nautobot_ssot/tests/meraki/fixtures/get_org_device_statuses_recv.json new file mode 100644 index 000000000..554d53d8d --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_org_device_statuses_recv.json @@ -0,0 +1,6 @@ +{ + "Lab01": "online", + "HQ01": "online", + "Lab Switch": "online", + "HQ AP": "online" +} \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_org_device_statuses_sent.json b/nautobot_ssot/tests/meraki/fixtures/get_org_device_statuses_sent.json new file mode 100644 index 000000000..b328fe334 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_org_device_statuses_sent.json @@ -0,0 +1,89 @@ +[ + { + "name": "Lab01", + "serial": "D7JB-OH13-QFAG", + "mac": "60:00:4j:7j:5s:00", + "publicIp": "47.88.30.14", + "networkId": "H_203630551508078460", + "status": "online", + "lastReportedAt": 
"2877-54-74T26:13:64K", + "productType": "appliance", + "components": { + "powerSupplies": [] + }, + "model": "MX400", + "tags": [], + "usingCellularFailover": false, + "wan1Ip": "47.88.30.14", + "wan1Gateway": "47.88.30.1", + "wan1IpType": "static", + "wan1PrimaryDns": "1.1.1.1", + "wan1SecondaryDns": "1.0.0.1", + "wan2Ip": null + }, + { + "name": "HQ01", + "serial": "V4GD-ABDP-YVCK", + "mac": "07:77:7i:8x:2a:nj", + "publicIp": "36.54.12.32", + "networkId": "L_165471703274884707", + "status": "online", + "lastReportedAt": "7376-34-61L20:55:35X", + "productType": "appliance", + "components": { + "powerSupplies": [] + }, + "model": "MX400", + "tags": [], + "usingCellularFailover": false, + "wan1Ip": "36.54.12.32", + "wan1Gateway": "36.54.12.1", + "wan1IpType": "static", + "wan1PrimaryDns": "1.1.1.1", + "wan1SecondaryDns": "1.0.0.1", + "wan2Ip": null + }, + { + "name": "Lab Switch", + "serial": "N0BA-AWBF-DCWP", + "mac": "66:76:4h:6p:4p:tw", + "publicIp": "47.88.30.15", + "networkId": "L_165471703274884707", + "status": "online", + "lastReportedAt": "7376-34-61L20:55:35X", + "productType": "switch", + "components": { + "powerSupplies": [] + }, + "model": "MS225-24", + "tags": [], + "usingCellularFailover": false, + "wan1Ip": "47.88.30.15", + "wan1Gateway": "47.88.30.1", + "wan1IpType": "static", + "wan1PrimaryDns": "1.1.1.1", + "wan1SecondaryDns": "1.0.0.1", + "wan2Ip": null + }, + { + "name": "HQ AP", + "serial": "L6XI-2BIN-EUTI", + "mac": "sl:31:m3:57:i0:1g", + "publicIp": "", + "networkId": "L_165471703274884707", + "status": "online", + "lastReportedAt": "7376-34-61L20:55:35X", + "productType": "wireless", + "components": { + "powerSupplies": [] + }, + "model": "MR42", + "tags": [], + "usingCellularFailover": false, + "lanIp": "10.5.52.3", + "gateway": "10.5.52.1", + "ipType": "static", + "primaryDns": "1.1.1.1", + "secondaryDns": "1.0.0.1" + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_org_devices.json b/nautobot_ssot/tests/meraki/fixtures/get_org_devices.json new file mode 100644 index 000000000..7098fee22 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_org_devices.json @@ -0,0 +1,73 @@ +[ + { + "name": "Lab01", + "serial": "D7JB-OH13-QFAG", + "mac": "60:00:4j:7j:5s:00", + "networkId": "H_203630551508078460", + "productType": "appliance", + "model": "MX400", + "address": "123 Main St, New York, NY", + "lat": 28.5881, + "lng": -84.25263, + "notes": "Lab device", + "tags": [], + "wan1Ip": "47.88.30.14", + "wan2Ip": null, + "configurationUpdatedAt": "2877-54-74T26:13:64K", + "firmware": "wired-16-16-9", + "url": "https://i384.meraki.com/Lab/n/iWFBbj2y/manage/nodes/new_list/2664516" + }, + { + "name": "HQ01", + "serial": "V4GD-ABDP-YVCK", + "mac": "07:77:7i:8x:2a:nj", + "networkId": "L_165471703274884707", + "productType": "appliance", + "model": "MX400", + "address": "123 Main St, DFW, TX", + "lat": 21.7321, + "lng": -13.64218, + "notes": "HQ device", + "tags": [], + "wan1Ip": "36.54.12.32", + "wan2Ip": null, + "configurationUpdatedAt": "7376-34-61L20:55:35X", + "firmware": "wired-16-16-9", + "url": "https://g488.meraki.com/HQ/n/dJBOpa0s/manage/nodes/new_list/7648103" + }, + { + "name": "Lab Switch", + "serial": "N0BA-AWBF-DCWP", + "mac": "66:76:4h:6p:4p:tw", + "networkId": "H_203630551508078460", + "productType": "switch", + "model": "MS225-24", + "address": "123 Main St, DFW, TX", + "lat": 28.5881, + "lng": -84.25263, + "notes": "Lab switch", + "tags": [], + "wan1Ip": "47.88.30.15", + "wan2Ip": null, + "configurationUpdatedAt": 
"2877-54-74T26:13:64K", + "firmware": "switch-15-21-1", + "url": "https://i384.meraki.com/Lab/n/bCUOar1a/manage/nodes/new_list/5738451" + }, + { + "name": "HQ AP", + "serial": "L6XI-2BIN-EUTI", + "mac": "sl:31:m3:57:i0:1g", + "networkId": "L_165471703274884707", + "productType": "wireless", + "model": "MR42", + "address": "", + "lat": 21.7321, + "lng": -13.64218, + "notes": "", + "tags": [], + "lanIp": "10.5.52.3", + "configurationUpdatedAt": "7376-34-61L20:55:35X", + "firmware": "wireless-29-5-1", + "url": "https://v363.meraki.com/HQ/n/cJub4qYk/manage/nodes/new_list/033568336184840" + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_org_networks_recv.json b/nautobot_ssot/tests/meraki/fixtures/get_org_networks_recv.json new file mode 100644 index 000000000..3e0750674 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_org_networks_recv.json @@ -0,0 +1,37 @@ +{ + "H_203630551508078460": { + "id": "H_203630551508078460", + "organizationId": "123456", + "name": "Lab", + "productTypes": [ + "appliance", + "wireless" + ], + "timeZone": "US/Eastern", + "tags": [ + "Lab" + ], + "enrollmentString": null, + "url": "https://s875.meraki.com/Lab/n/AYWjEr1b/manage/usage/list", + "notes": "Lab", + "isBoundToConfigTemplate": false + }, + "L_165471703274884707": { + "id": "L_165471703274884707", + "organizationId": "123456", + "name": "HQ", + "productTypes": [ + "appliance", + "switch", + "wireless" + ], + "timeZone": "US/Central", + "tags": [ + "HQ" + ], + "enrollmentString": null, + "url": "https://o217.meraki.com/HQ/n/fpwVLq4i/manage/usage/list", + "notes": "", + "isBoundToConfigTemplate": false + } +} \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_org_networks_sent.json b/nautobot_ssot/tests/meraki/fixtures/get_org_networks_sent.json new file mode 100644 index 000000000..822cb0477 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_org_networks_sent.json @@ -0,0 +1,37 @@ +[ + { + "id": "H_203630551508078460", + "organizationId": "123456", + "name": "Lab", + "productTypes": [ + "appliance", + "wireless" + ], + "timeZone": "US/Eastern", + "tags": [ + "Lab" + ], + "enrollmentString": null, + "url": "https://s875.meraki.com/Lab/n/AYWjEr1b/manage/usage/list", + "notes": "Lab", + "isBoundToConfigTemplate": false + }, + { + "id": "L_165471703274884707", + "organizationId": "123456", + "name": "HQ", + "productTypes": [ + "appliance", + "switch", + "wireless" + ], + "timeZone": "US/Central", + "tags": [ + "HQ" + ], + "enrollmentString": null, + "url": "https://o217.meraki.com/HQ/n/fpwVLq4i/manage/usage/list", + "notes": "", + "isBoundToConfigTemplate": false + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_org_switchports_recv.json b/nautobot_ssot/tests/meraki/fixtures/get_org_switchports_recv.json new file mode 100644 index 000000000..3228d6196 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_org_switchports_recv.json @@ -0,0 +1,464 @@ +{ + "N0BA-AWBF-DCWP": { + "name": "Lab Switch", + "serial": "N0BA-AWBF-DCWP", + "mac": "66:76:4h:6p:4p:tw", + "network": { + "id": "H_203630551508078460", + "name": "Lab" + }, + "model": "MS225-24", + "ports": [ + { + "portId": "1", + "name": "", + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "2", + "name": "", + 
"tags": [], + "enabled": true, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "3", + "name": null, + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "4", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "5", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "6", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "7", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "8", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "9", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "10", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "11", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "12", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "13", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "14", + "name": "", + "tags": 
[], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "15", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "16", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "17", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "18", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "19", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "20", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "21", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "22", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "23", + "name": "", + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "24", + "name": "", + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "access", + "vlan": 2305, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "25", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "26", + "name": "", + "tags": [], + 
"enabled": false, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "27", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "28", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "29", + "name": null, + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "30", + "name": null, + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + } + ] + } +} \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_org_switchports_sent.json b/nautobot_ssot/tests/meraki/fixtures/get_org_switchports_sent.json new file mode 100644 index 000000000..154f15553 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_org_switchports_sent.json @@ -0,0 +1,464 @@ +[ + { + "name": "Lab Switch", + "serial": "N0BA-AWBF-DCWP", + "mac": "66:76:4h:6p:4p:tw", + "network": { + "id": "H_203630551508078460", + "name": "Lab" + }, + "model": "MS225-24", + "ports": [ + { + "portId": "1", + "name": "", + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "2", + "name": "", + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "3", + "name": null, + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "4", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "5", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "6", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + 
"voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "7", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "8", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "9", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "10", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "11", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "12", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "13", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "14", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "15", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "16", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "17", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "18", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + 
"voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "19", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "20", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "21", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "22", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "23", + "name": "", + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "access", + "vlan": 444, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "24", + "name": "", + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "access", + "vlan": 2305, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "25", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "26", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "27", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "28", + "name": "", + "tags": [], + "enabled": false, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "29", + "name": null, + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, + "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + }, + { + "portId": "30", + "name": null, + "tags": [], + "enabled": true, + "poeEnabled": false, + "type": "trunk", + "vlan": 1, + "voiceVlan": null, 
+ "allowedVlans": "all", + "rstpEnabled": true, + "stpGuard": "disabled", + "linkNegotiation": "Auto negotiate", + "accessPolicyType": "Open" + } + ] + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_org_uplink_statuses_recv.json b/nautobot_ssot/tests/meraki/fixtures/get_org_uplink_statuses_recv.json new file mode 100644 index 000000000..e0d3ecf34 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_org_uplink_statuses_recv.json @@ -0,0 +1,66 @@ +{ + "D7JB-OH13-QFAG": { + "networkId": "H_203630551508078460", + "serial": "D7JB-OH13-QFAG", + "model": "MX400", + "highAvailability": { + "enabled": true, + "role": "spare" + }, + "lastReportedAt": "2877-54-74T26:13:64K", + "uplinks": [ + { + "interface": "wan1", + "status": "active", + "ip": "47.88.30.14", + "gateway": "47.88.30.1", + "publicIp": "47.88.30.14", + "primaryDns": "1.1.1.1", + "secondaryDns": "1.0.0.1", + "ipAssignedBy": "static" + }, + { + "interface": "wan2", + "status": "not connected", + "ip": null, + "gateway": null, + "publicIp": null, + "primaryDns": null, + "secondaryDns": null, + "ipAssignedBy": null + } + ] + }, + "V4GD-ABDP-YVCK": { + "networkId": "L_165471703274884707", + "serial": "V4GD-ABDP-YVCK", + "model": "MX400", + "highAvailability": { + "enabled": true, + "role": "spare" + }, + "lastReportedAt": "7376-34-61L20:55:35X", + "uplinks": [ + { + "interface": "wan1", + "status": "active", + "ip": "36.54.12.32", + "gateway": "36.54.12.1", + "publicIp": "36.54.12.32", + "primaryDns": "1.1.1.1", + "secondaryDns": "1.0.0.1", + "ipAssignedBy": "static" + }, + { + "interface": "wan2", + "status": "not connected", + "ip": null, + "gateway": null, + "publicIp": null, + "primaryDns": null, + "secondaryDns": null, + "ipAssignedBy": null + } + ] + } +} \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_org_uplink_statuses_sent.json b/nautobot_ssot/tests/meraki/fixtures/get_org_uplink_statuses_sent.json new file mode 100644 index 000000000..710eadc3e --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_org_uplink_statuses_sent.json @@ -0,0 +1,66 @@ +[ + { + "networkId": "H_203630551508078460", + "serial": "D7JB-OH13-QFAG", + "model": "MX400", + "highAvailability": { + "enabled": true, + "role": "spare" + }, + "lastReportedAt": "2877-54-74T26:13:64K", + "uplinks": [ + { + "interface": "wan1", + "status": "active", + "ip": "47.88.30.14", + "gateway": "47.88.30.1", + "publicIp": "47.88.30.14", + "primaryDns": "1.1.1.1", + "secondaryDns": "1.0.0.1", + "ipAssignedBy": "static" + }, + { + "interface": "wan2", + "status": "not connected", + "ip": null, + "gateway": null, + "publicIp": null, + "primaryDns": null, + "secondaryDns": null, + "ipAssignedBy": null + } + ] + }, + { + "networkId": "L_165471703274884707", + "serial": "V4GD-ABDP-YVCK", + "model": "MX400", + "highAvailability": { + "enabled": true, + "role": "spare" + }, + "lastReportedAt": "7376-34-61L20:55:35X", + "uplinks": [ + { + "interface": "wan1", + "status": "active", + "ip": "36.54.12.32", + "gateway": "36.54.12.1", + "publicIp": "36.54.12.32", + "primaryDns": "1.1.1.1", + "secondaryDns": "1.0.0.1", + "ipAssignedBy": "static" + }, + { + "interface": "wan2", + "status": "not connected", + "ip": null, + "gateway": null, + "publicIp": null, + "primaryDns": null, + "secondaryDns": null, + "ipAssignedBy": null + } + ] + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_switchport_statuses.json b/nautobot_ssot/tests/meraki/fixtures/get_switchport_statuses.json 
new file mode 100644 index 000000000..098c0b061 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_switchport_statuses.json @@ -0,0 +1,1603 @@ +[ + { + "portId": "1", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "2", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "lldp": { + "portId": "70:30:h1:03:1i:65", + "chassisId": "61:18:t2:78:5e:24" + }, + "clientCount": 2, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "3", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "1 Gbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "4", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "5", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "6", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "7", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + 
"configOverrides": {} + } + }, + { + "portId": "8", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "9", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "lldp": { + "portId": "g1:54:f2:0j:62:6y", + "chassisId": "r7:54:j4:1q:44:5b" + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "10", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "11", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "12", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "13", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "1 Gbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "lldp": { + "portId": "p6:so:1r:k2:60:5b", + "chassisId": "s6:pi:3c:h4:84:7y" + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "14", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "15", + "enabled": true, + 
"status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "16", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "17", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "18", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "lldp": { + "portId": "30:70:7q:21:j8:6g", + "chassisId": "50:13:6e:10:j1:1d" + }, + "clientCount": 2, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "19", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "20", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "21", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "22", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + 
"duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "23", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "24", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "25", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "26", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "27", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "28", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "29", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + 
"sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "30", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "31", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "32", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "33", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "34", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "35", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "36", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "1 Gbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": 
"37", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "38", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "39", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "40", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "41", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "42", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "43", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "44", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 
0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "45", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "46", + "enabled": true, + "status": "Connected", + "isUplink": false, + "errors": [], + "warnings": [], + "speed": "100 Mbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 1, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "47", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "48", + "enabled": true, + "status": "Connected", + "isUplink": true, + "errors": [], + "warnings": [], + "speed": "1 Gbps", + "duplex": "full", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "lldp": { + "chassisId": "e7:7c:78:04:35:06" + }, + "clientCount": 5, + "powerUsageInWh": 0.0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "49", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "50", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "51", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + 
"authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "52", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "53", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + }, + { + "portId": "54", + "enabled": true, + "status": "Disconnected", + "isUplink": false, + "errors": [ + "Port disconnected" + ], + "warnings": [], + "speed": "", + "duplex": "", + "usageInKb": { + "total": 0, + "sent": 0, + "recv": 0 + }, + "clientCount": 0, + "trafficInKbps": { + "total": 0.0, + "sent": 0.0, + "recv": 0.0 + }, + "securePort": { + "enabled": false, + "active": false, + "authenticationStatus": "Disabled", + "configOverrides": {} + } + } +] \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_uplink_settings_recv.json b/nautobot_ssot/tests/meraki/fixtures/get_uplink_settings_recv.json new file mode 100644 index 000000000..f4ae89663 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_uplink_settings_recv.json @@ -0,0 +1,38 @@ +{ + "wan1": { + "enabled": true, + "vlanTagging": { + "enabled": false + }, + "svis": { + "ipv4": { + "assignmentMode": "static", + "gateway": "10.1.15.1", + "address": "10.1.15.34/24", + "nameservers": { + "addresses": [ + "1.1.1.1", + "1.0.0.1" + ] + } + } + }, + "pppoe": { + "enabled": false + } + }, + "wan2": { + "enabled": true, + "vlanTagging": { + "enabled": false + }, + "svis": { + "ipv4": { + "assignmentMode": "dynamic" + } + }, + "pppoe": { + "enabled": false + } + } +} \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/get_uplink_settings_sent.json b/nautobot_ssot/tests/meraki/fixtures/get_uplink_settings_sent.json new file mode 100644 index 000000000..6dd9942ce --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/get_uplink_settings_sent.json @@ -0,0 +1,40 @@ +{ + "interfaces": { + "wan1": { + "enabled": true, + "vlanTagging": { + "enabled": false + }, + "svis": { + "ipv4": { + "assignmentMode": "static", + "gateway": "10.1.15.1", + "address": "10.1.15.34/24", + "nameservers": { + "addresses": [ + "1.1.1.1", + "1.0.0.1" + ] + } + } + }, + "pppoe": { + "enabled": false + } + }, + "wan2": { + "enabled": true, + "vlanTagging": { + "enabled": false + }, + "svis": { + "ipv4": { + "assignmentMode": "dynamic" + } + }, + "pppoe": { + "enabled": false + } + } + } +} \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/fixtures/network_map.json b/nautobot_ssot/tests/meraki/fixtures/network_map.json new file mode 100644 index 000000000..3e0750674 --- /dev/null +++ b/nautobot_ssot/tests/meraki/fixtures/network_map.json @@ -0,0 +1,37 @@ +{ + "H_203630551508078460": { + "id": "H_203630551508078460", + "organizationId": "123456", + "name": "Lab", + "productTypes": [ + "appliance", + "wireless" + ], + 
"timeZone": "US/Eastern", + "tags": [ + "Lab" + ], + "enrollmentString": null, + "url": "https://s875.meraki.com/Lab/n/AYWjEr1b/manage/usage/list", + "notes": "Lab", + "isBoundToConfigTemplate": false + }, + "L_165471703274884707": { + "id": "L_165471703274884707", + "organizationId": "123456", + "name": "HQ", + "productTypes": [ + "appliance", + "switch", + "wireless" + ], + "timeZone": "US/Central", + "tags": [ + "HQ" + ], + "enrollmentString": null, + "url": "https://o217.meraki.com/HQ/n/fpwVLq4i/manage/usage/list", + "notes": "", + "isBoundToConfigTemplate": false + } +} \ No newline at end of file diff --git a/nautobot_ssot/tests/meraki/test_adapters_meraki.py b/nautobot_ssot/tests/meraki/test_adapters_meraki.py new file mode 100644 index 000000000..31aa8d24a --- /dev/null +++ b/nautobot_ssot/tests/meraki/test_adapters_meraki.py @@ -0,0 +1,99 @@ +"""Test Meraki adapter.""" + +from unittest.mock import MagicMock + +from django.contrib.contenttypes.models import ContentType +from nautobot.core.testing import TransactionTestCase +from nautobot.dcim.models import Device, LocationType +from nautobot.extras.models import JobResult + +from nautobot_ssot.integrations.meraki.diffsync.adapters.meraki import MerakiAdapter +from nautobot_ssot.integrations.meraki.jobs import MerakiDataSource +from nautobot_ssot.tests.meraki.fixtures import fixtures as fix + + +class TestMerakiAdapterTestCase(TransactionTestCase): + """Test NautobotSsotMerakiAdapter class.""" + + databases = ("default", "job_logs") + + def setUp(self): + """Initialize test case.""" + self.meraki_client = MagicMock() + self.meraki_client.get_org_networks.return_value = fix.GET_ORG_NETWORKS_SENT_FIXTURE + self.meraki_client.network_map = fix.NETWORK_MAP_FIXTURE + self.meraki_client.get_org_devices.return_value = fix.GET_ORG_DEVICES_FIXTURE + self.meraki_client.get_org_device_statuses.return_value = fix.GET_ORG_DEVICE_STATUSES_RECV_FIXTURE + self.meraki_client.get_management_ports.return_value = fix.GET_MANAGEMENT_PORTS_RECV_FIXTURE + self.meraki_client.get_uplink_settings.return_value = fix.GET_UPLINK_SETTINGS_RECV + self.meraki_client.get_switchport_statuses.return_value = fix.GET_SWITCHPORT_STATUSES + self.meraki_client.get_org_uplink_statuses.return_value = fix.GET_ORG_UPLINK_STATUSES_RECV_FIXTURE + self.meraki_client.get_appliance_switchports.return_value = fix.GET_APPLIANCE_SWITCHPORTS_FIXTURE + self.meraki_client.get_org_switchports.return_value = fix.GET_ORG_SWITCHPORTS_RECV_FIXTURE + + site_loctype = LocationType.objects.get_or_create(name="Site")[0] + site_loctype.content_types.add(ContentType.objects.get_for_model(Device)) + self.job = MerakiDataSource() + self.job.logger.warning = MagicMock() + self.job.instance = MagicMock() + self.job.instance.controller_managed_device_groups = MagicMock() + self.job.instance.controller_managed_device_groups.first().name = "Meraki Managed Device Group" + self.job.instance.controller_managed_device_groups.count().return_value = 1 + self.job.hostname_mapping = [] + self.job.devicetype_mapping = [("MS", "Switch"), ("MX", "Firewall")] + self.job.network_loctype = site_loctype + self.job.job_result = JobResult.objects.create( + name=self.job.class_path, task_name="fake task", worker="default" + ) + self.meraki = MerakiAdapter(job=self.job, sync=None, client=self.meraki_client) + + def test_data_loading(self): + """Test Nautobot SSoT for Meraki load() function.""" + self.meraki_client.validate_organization_exists.return_value = True + self.meraki.load() + self.assertEqual( + 
{f"{net['name']}__None" for net in fix.GET_ORG_NETWORKS_SENT_FIXTURE}, + {net.get_unique_id() for net in self.meraki.get_all("network")}, + ) + self.assertEqual( + {dev["name"] for dev in fix.GET_ORG_DEVICES_FIXTURE}, + {dev.get_unique_id() for dev in self.meraki.get_all("device")}, + ) + wan1_ports = [ + f"wan1__{dev['name']}" + for dev in fix.GET_ORG_DEVICES_FIXTURE + if dev["model"].startswith(("MX", "MG", "MR", "MS", "Z")) + ] + wan2_ports = [ + f"wan2__{dev['name']}" + for dev in fix.GET_ORG_DEVICES_FIXTURE + if dev["model"].startswith(("MX", "MG", "MR", "MS", "Z")) + ] + lan_ports = [] + for port in fix.GET_APPLIANCE_SWITCHPORTS_FIXTURE: + for dev in fix.GET_ORG_DEVICES_FIXTURE: + if dev["model"].startswith(("MX", "MG", "Z")): + lan_ports.append(f"{port['number']}__{dev['name']}") + for switch in fix.GET_ORG_SWITCHPORTS_SENT_FIXTURE: + for port in switch["ports"]: + lan_ports.append(f"{port['portId']}__Lab Switch") + expected_ports = set(wan1_ports + wan2_ports + lan_ports) + self.assertEqual(expected_ports, {port.get_unique_id() for port in self.meraki.get_all("port")}) + self.assertEqual({"10.1.15.0/24__Global"}, {pf.get_unique_id() for pf in self.meraki.get_all("prefix")}) + self.assertEqual( + { + "10.1.15.10/24__10.1.15.0/24", + "10.1.15.34/24__10.1.15.0/24", + }, + {ip.get_unique_id() for ip in self.meraki.get_all("ipaddress")}, + ) + + def test_duplicate_device_loading_error(self): + """Validate error thrown when duplicate device attempts to be loaded.""" + self.meraki.load_devices() + self.meraki.load_devices() + self.job.logger.warning.assert_called() + self.job.logger.warning.calls[0].contains(message="Duplicate device Lab01 found and being skipped.") + self.job.logger.warning.calls[1].contains(message="Duplicate device HQ01 found and being skipped.") + self.job.logger.warning.calls[2].contains(message="Duplicate device Lab Switch found and being skipped.") + self.job.logger.warning.calls[3].contains(message="Duplicate device HQ AP found and being skipped.") diff --git a/nautobot_ssot/tests/meraki/test_adapters_nautobot.py b/nautobot_ssot/tests/meraki/test_adapters_nautobot.py new file mode 100644 index 000000000..f772611f3 --- /dev/null +++ b/nautobot_ssot/tests/meraki/test_adapters_nautobot.py @@ -0,0 +1,144 @@ +"""Unit tests for the Nautobot DiffSync adapter.""" + +from unittest.mock import MagicMock + +from django.contrib.auth import get_user_model +from django.contrib.contenttypes.models import ContentType +from nautobot.core.testing import TransactionTestCase +from nautobot.dcim.models import Device, DeviceType, Interface, Location, LocationType, Manufacturer, Platform +from nautobot.extras.models import JobResult, Note, Role, Status +from nautobot.ipam.models import IPAddress, IPAddressToInterface, Namespace, Prefix + +from nautobot_ssot.integrations.meraki.diffsync.adapters.nautobot import NautobotAdapter +from nautobot_ssot.integrations.meraki.jobs import MerakiDataSource + +User = get_user_model() + + +class NautobotDiffSyncTestCase(TransactionTestCase): + """Test the NautobotAdapter class.""" + + databases = ("default", "job_logs") + + def setUp(self): # pylint: disable=too-many-locals + """Per-test-case data setup.""" + super().setUp() + self.status_active = Status.objects.get(name="Active") + + self.region_type = LocationType.objects.get_or_create(name="Region", defaults={"nestable": True})[0] + global_region = Location.objects.create( + name="Global Region", + location_type=self.region_type, + status=self.status_active, + ) + global_region.validated_save() + 
self.site_type = LocationType.objects.get_or_create(name="Site")[0] + self.site_type.content_types.add(ContentType.objects.get_for_model(Device)) + self.site_type.content_types.add(ContentType.objects.get_for_model(Prefix)) + site1 = Location.objects.create( + name="Lab", + location_type=self.site_type, + status=self.status_active, + time_zone="America/Chicago", + ) + site1.validated_save() + site1.tags.set(["Test"]) + site1.validated_save() + site_note = Note.objects.create( + note="Test", + user=User.objects.first(), + assigned_object_type=ContentType.objects.get_for_model(Location), + assigned_object_id=site1.id, + ) + site_note.validated_save() + + cisco_manu = Manufacturer.objects.get_or_create(name="Cisco Meraki")[0] + cisco_manu.validated_save() + + meraki_plat = Platform.objects.get_or_create(name="Cisco Meraki")[0] + + mx84 = DeviceType.objects.create(model="MX84", manufacturer=cisco_manu) + mx84.validated_save() + + core_role = Role.objects.get_or_create(name="CORE")[0] + core_role.content_types.add(ContentType.objects.get_for_model(Device)) + + lab01 = Device.objects.create( + name="Lab01", + serial="ABC-123-456", + status=self.status_active, + role=core_role, + device_type=mx84, + platform=meraki_plat, + location=site1, + ) + lab01.validated_save() + lab01.custom_field_data["system_of_record"] = "Meraki SSoT" + lab01.custom_field_data["os_version"] = "10.1.1" + lab01.validated_save() + lab01_note = Note.objects.create( + note="Lab01 Test Note", + user=User.objects.first(), + assigned_object_type=ContentType.objects.get_for_model(Device), + assigned_object_id=lab01.id, + ) + lab01_note.validated_save() + + lab01_mgmt = Interface.objects.create( + name="wan1", + device=lab01, + enabled=True, + mode="access", + mgmt_only=True, + type="1000base-t", + status=self.status_active, + ) + lab01_mgmt.validated_save() + lab01_mgmt.custom_field_data["system_of_record"] = "Meraki SSoT" + lab01_mgmt.validated_save() + + test_ns = Namespace.objects.create(name="Test") + lab_prefix = Prefix.objects.create( + prefix="10.0.0.0/24", location=site1, namespace=test_ns, status=self.status_active + ) + lab01_mgmt_ip = IPAddress.objects.create(address="10.0.0.1/24", parent=lab_prefix, status=self.status_active) + lab_prefix.custom_field_data["system_of_record"] = "Meraki SSoT" + lab_prefix.validated_save() + lab01_mgmt_ip.custom_field_data["system_of_record"] = "Meraki SSoT" + lab01_mgmt_ip.validated_save() + IPAddressToInterface.objects.create(ip_address=lab01_mgmt_ip, interface=lab01_mgmt) + + job = MerakiDataSource() + job.logger.warning = MagicMock() + job.parent_location = global_region + job.hostname_mapping = [] + job.devicetype_mapping = [("MS", "Switch"), ("MX", "Firewall")] + job.network_loctype = self.site_type + job.tenant = None + job.job_result = JobResult.objects.create(name=job.class_path, task_name="fake task", worker="default") + self.nb_adapter = NautobotAdapter(job=job, sync=None) + + def test_data_loading(self): + """Test the load() function.""" + self.nb_adapter.load() + self.assertEqual( + {f"{site.name}__None" for site in Location.objects.filter(location_type=self.site_type)}, + {site.get_unique_id() for site in self.nb_adapter.get_all("network")}, + ) + self.assertEqual( + {dev.name for dev in Device.objects.all()}, + {dev.get_unique_id() for dev in self.nb_adapter.get_all("device")}, + ) + self.assertEqual({"wan1__Lab01"}, {port.get_unique_id() for port in self.nb_adapter.get_all("port")}) + self.assertEqual( + {f"{pf.prefix}__{pf.namespace.name}" for pf in 
Prefix.objects.all()}, + {pf.get_unique_id() for pf in self.nb_adapter.get_all("prefix")}, + ) + self.assertEqual( + {f"{ipaddr.address}__{ipaddr.parent.prefix}" for ipaddr in IPAddress.objects.all()}, + {ipaddr.get_unique_id() for ipaddr in self.nb_adapter.get_all("ipaddress")}, + ) + self.assertEqual( + {"10.0.0.1/24__Lab01__Test__wan1"}, + {map.get_unique_id() for map in self.nb_adapter.get_all("ipassignment")}, + ) diff --git a/nautobot_ssot/tests/meraki/test_models_nautobot.py b/nautobot_ssot/tests/meraki/test_models_nautobot.py new file mode 100644 index 000000000..998e5601e --- /dev/null +++ b/nautobot_ssot/tests/meraki/test_models_nautobot.py @@ -0,0 +1,95 @@ +"""Unit tests for Nautobot IPAM model CRUD functions.""" + +from unittest.mock import patch + +from diffsync import Adapter +from django.contrib.contenttypes.models import ContentType +from django.test import override_settings +from nautobot.core.testing import TransactionTestCase +from nautobot.dcim.models import Location, LocationType +from nautobot.extras.models import Status +from nautobot.ipam.models import Namespace, Prefix +from nautobot.tenancy.models import Tenant + +from nautobot_ssot.integrations.meraki.diffsync.models.nautobot import NautobotPrefix + + +@override_settings(PLUGINS_CONFIG={"nautobot_ssot": {"enable_meraki": True}}) +class TestNautobotPrefix(TransactionTestCase): # pylint: disable=too-many-instance-attributes + """Test the NautobotPrefix class.""" + + databases = ("default", "job_logs") + + def setUp(self): + """Configure common variables and objects for tests.""" + super().setUp() + self.status_active = Status.objects.get(name="Active") + site_lt = LocationType.objects.get_or_create(name="Site")[0] + site_lt.content_types.add(ContentType.objects.get_for_model(Prefix)) + self.test_site = Location.objects.get_or_create(name="Test", location_type=site_lt, status=self.status_active)[ + 0 + ] + self.update_site = Location.objects.get_or_create( + name="Update", location_type=site_lt, status=self.status_active + )[0] + self.test_tenant = Tenant.objects.get_or_create(name="Test")[0] + self.update_tenant = Tenant.objects.get_or_create(name="Update")[0] + self.test_ns = Namespace.objects.get_or_create(name="Test")[0] + self.prefix = Prefix.objects.create( + prefix="10.0.0.0/24", namespace=self.test_ns, status=self.status_active, tenant=self.test_tenant + ) + self.adapter = Adapter() + self.adapter.namespace_map = {"Test": self.test_ns.id, "Update": self.update_site.id} + self.adapter.site_map = {"Test": self.test_site, "Update": self.update_site} + self.adapter.tenant_map = {"Test": self.test_tenant.id, "Update": self.update_tenant.id} + self.adapter.status_map = {"Active": self.status_active.id} + self.adapter.prefix_map = {} + self.adapter.objects_to_create = {"prefixes": [], "prefix_locs": []} + self.adapter.objects_to_delete = {"prefixes": []} + + def test_create(self): + """Validate the NautobotPrefix create() method creates a Prefix.""" + self.prefix.delete() + ids = {"prefix": "10.0.0.0/24", "namespace": "Test"} + attrs = {"location": "Test", "tenant": "Test"} + result = NautobotPrefix.create(self.adapter, ids, attrs) + self.assertIsInstance(result, NautobotPrefix) + self.assertEqual(len(self.adapter.objects_to_create["prefixes"]), 1) + self.assertEqual(len(self.adapter.objects_to_create["prefix_locs"]), 1) + subnet = self.adapter.objects_to_create["prefixes"][0] + self.assertEqual(str(subnet.prefix), ids["prefix"]) + self.assertEqual(self.adapter.prefix_map[ids["prefix"]], subnet.id) + 
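+        # NautobotPrefix.create() stages the new Prefix on adapter.objects_to_create
+        # instead of saving it immediately, so the test inspects the staged object
+        # rather than querying the ORM.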
self.assertEqual(subnet.custom_field_data["system_of_record"], "Meraki SSoT")
+
+    def test_update(self):
+        """Validate the NautobotPrefix update() method updates a Prefix."""
+        test_pf = NautobotPrefix(
+            prefix="10.0.0.0/24",
+            namespace="Test",
+            location="Test",
+            tenant="Test",
+            uuid=self.prefix.id,
+        )
+        test_pf.adapter = self.adapter
+        update_attrs = {"location": "Update", "tenant": "Update"}
+        actual = NautobotPrefix.update(self=test_pf, attrs=update_attrs)
+        self.prefix.refresh_from_db()
+        self.assertEqual(self.prefix.location, self.update_site)
+        self.assertEqual(self.prefix.tenant, self.update_tenant)
+        self.assertEqual(actual, test_pf)
+
+    @patch("nautobot_ssot.integrations.meraki.diffsync.models.nautobot.OrmPrefix.objects.get")
+    def test_delete(self, mock_prefix):
+        """Validate the NautobotPrefix delete() deletes a Prefix."""
+        test_pf = NautobotPrefix(
+            prefix="10.0.0.0/24",
+            namespace="Test",
+            location="Test",
+            tenant="Test",
+            uuid=self.prefix.id,
+        )
+        test_pf.adapter = self.adapter
+        mock_prefix.return_value = self.prefix
+        test_pf.delete()
+        self.assertEqual(len(self.adapter.objects_to_delete["prefixes"]), 1)
+        self.assertEqual(self.adapter.objects_to_delete["prefixes"][0].id, self.prefix.id)
diff --git a/nautobot_ssot/tests/meraki/test_utils_meraki.py b/nautobot_ssot/tests/meraki/test_utils_meraki.py
new file mode 100644
index 000000000..499400c46
--- /dev/null
+++ b/nautobot_ssot/tests/meraki/test_utils_meraki.py
@@ -0,0 +1,126 @@
+"""Unit tests for Meraki utility functions."""
+
+from unittest import TestCase
+from unittest.mock import MagicMock, patch
+
+import meraki
+
+from nautobot_ssot.integrations.meraki.utils.meraki import DashboardClient
+from nautobot_ssot.tests.meraki.fixtures import fixtures as fix
+
+
+class TestDashboardClient(TestCase):
+    """Unit tests for the DashboardClient class."""
+
+    @patch("meraki.DashboardAPI")
+    def test_successful_connection(self, mock_api):
+        """Test successful connection to the Meraki dashboard with a valid API key and base URL."""
+        logger = MagicMock()
+        org_id = "12345"
+        token = "valid_token"  # noqa: S105
+        dashboard_client = DashboardClient(logger, org_id, token)
+
+        mock_api.assert_called_once_with(
+            api_key=token, base_url="https://api.meraki.com/api/v1/", output_log=False, print_console=False
+        )
+
+        self.assertIsNotNone(dashboard_client.conn)
+        self.assertEqual(dashboard_client.logger, logger)
+        self.assertEqual(dashboard_client.org_id, org_id)
+        self.assertEqual(dashboard_client.token, token)
+
+    @patch("meraki.DashboardAPI")
+    def test_invalid_api_key(self, mock_api):
+        """Test that an exception of type 'meraki.APIError' is raised if the API key is invalid or missing."""
+        mock_response = MagicMock()
+        mock_response.status_code = 401
+        mock_response.reason = "Invalid API key"
+        mock_api.side_effect = meraki.APIError(
+            metadata={"operation": "GET", "tags": ["Failed"]}, response=mock_response
+        )
+
+        logger = MagicMock()
+        org_id = "12345"
+        token = "invalid_token"  # noqa: S105
+
+        with self.assertRaises(meraki.APIError):
+            DashboardClient(logger, org_id, token)
+
+    def test_validate_organization_exists_success_response(self):
+        """Test the validate_organization_exists() response is true if the org ID is found."""
+        logger = MagicMock()
+        org_id = "123456789"
+        token = "your_api_token"  # noqa: S105
+        dashboard_client = DashboardClient(logger, org_id, token)
+        dashboard_client.conn.organizations.getOrganizations = MagicMock()
+        dashboard_client.conn.organizations.getOrganizations.return_value = [{"id": "123456789"}, {"id":
"987654321"}] + + organization_exists = dashboard_client.validate_organization_exists() + + self.assertTrue(organization_exists) + + def test_validate_organization_exists_failure_response(self): + """Test the validate_organization_exists() response is false if wrong org ID.""" + logger = MagicMock() + org_id = "123456789" + token = "your_api_token" # noqa: S105 + dashboard_client = DashboardClient(logger, org_id, token) + dashboard_client.conn.organizations.getOrganizations = MagicMock() + dashboard_client.conn.organizations.getOrganizations.return_value = [{"id": "987654321"}] + + organization_exists = dashboard_client.validate_organization_exists() + + self.assertFalse(organization_exists) + + def test_get_org_networks(self): + """Test the get_org_networks() response is as expected.""" + logger = MagicMock() + org_id = "123456789" + token = "your_api_token" # noqa: S105 + client = DashboardClient(logger, org_id, token) + client.conn.organizations.getOrganizationNetworks = MagicMock() + client.conn.organizations.getOrganizationNetworks.return_value = fix.GET_ORG_NETWORKS_SENT_FIXTURE + + actual = client.get_org_networks() + expected = fix.GET_ORG_NETWORKS_SENT_FIXTURE + self.assertEqual(actual, expected) + self.assertEqual(client.network_map, fix.GET_ORG_NETWORKS_RECV_FIXTURE) + + def test_get_org_devices(self): + """Test the get_org_devices() response is as expected.""" + logger = MagicMock() + org_id = "123456789" + token = "your_api_token" # noqa: S105 + client = DashboardClient(logger, org_id, token) + client.conn.organizations.getOrganizationDevices = MagicMock() + client.conn.organizations.getOrganizationDevices.return_value = fix.GET_ORG_DEVICES_FIXTURE + + actual = client.get_org_devices() + expected = fix.GET_ORG_DEVICES_FIXTURE + self.assertEqual(actual, expected) + + def test_get_org_switchports(self): + """Test the get_org_switchports() response is as expected.""" + logger = MagicMock() + org_id = "123456789" + token = "your_api_token" # noqa: S105 + client = DashboardClient(logger, org_id, token) + client.conn.switch.getOrganizationSwitchPortsBySwitch = MagicMock() + client.conn.switch.getOrganizationSwitchPortsBySwitch.return_value = fix.GET_ORG_SWITCHPORTS_SENT_FIXTURE + + actual = client.get_org_switchports() + expected = fix.GET_ORG_SWITCHPORTS_RECV_FIXTURE + self.assertEqual(actual, expected) + + def test_get_org_device_statuses(self): + """Test the get_org_device_statuses() response is as expected.""" + logger = MagicMock() + org_id = "123456789" + token = "your_api_token" # noqa: S105 + client = DashboardClient(logger, org_id, token) + client.conn.organizations.getOrganizationDevicesStatuses = MagicMock() + client.conn.organizations.getOrganizationDevicesStatuses.return_value = fix.GET_ORG_DEVICE_STATUSES_SENT_FIXTURE + + actual = client.get_org_device_statuses() + expected = fix.GET_ORG_DEVICE_STATUSES_RECV_FIXTURE + self.assertEqual(actual, expected) diff --git a/nautobot_ssot/tests/test_basic.py b/nautobot_ssot/tests/test_basic.py index 83142face..d72f2d02e 100644 --- a/nautobot_ssot/tests/test_basic.py +++ b/nautobot_ssot/tests/test_basic.py @@ -11,21 +11,11 @@ class TestDocsPackaging(unittest.TestCase): def test_version(self): """Verify that pyproject.toml dev dependencies have the same versions as in the docs requirements.txt.""" - parent_path = os.path.dirname( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - ) + parent_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) poetry_path = os.path.join(parent_path, 
"pyproject.toml") - poetry_details = toml.load(poetry_path)["tool"]["poetry"]["group"]["dev"][ - "dependencies" - ] - with open( - f"{parent_path}/docs/requirements.txt", "r", encoding="utf-8" - ) as file: - requirements = [ - line - for line in file.read().splitlines() - if (len(line) > 0 and not line.startswith("#")) - ] + poetry_details = toml.load(poetry_path)["tool"]["poetry"]["group"]["dev"]["dependencies"] + with open(f"{parent_path}/docs/requirements.txt", "r", encoding="utf-8") as file: + requirements = [line for line in file.read().splitlines() if (len(line) > 0 and not line.startswith("#"))] for pkg in requirements: package_name = pkg if len(pkg.split("==")) == 2: # noqa: PLR2004 diff --git a/nautobot_ssot/tests/test_jobs.py b/nautobot_ssot/tests/test_jobs.py index b4052b242..de2f09cca 100644 --- a/nautobot_ssot/tests/test_jobs.py +++ b/nautobot_ssot/tests/test_jobs.py @@ -1,7 +1,7 @@ """Test the Job classes in nautobot_ssot.""" import os.path -from unittest.mock import Mock, call +from unittest.mock import Mock, call, patch from django.db.utils import IntegrityError, OperationalError from django.test import override_settings @@ -90,9 +90,53 @@ def test_run(self): self.assertIsNone(self.job.sync.sync_time) self.assertEqual(self.job.sync.source, self.job.data_source) self.assertEqual(self.job.sync.target, self.job.data_target) - self.assertTrue(self.job.dryrun) + self.assertTrue(self.job.sync.dry_run) self.assertEqual(self.job.job_result, self.job.sync.job_result) + def test_job_dryrun_false(self): + """Test the job is not ran in dryrun mode.""" + with patch.object(DataSyncBaseJob, "execute_sync") as mock_execute_sync: + isolated_job = DataSyncBaseJob() + + isolated_job.job_result = JobResult.objects.create( + name="fake job no dryrun", + task_name="fake job no dryrun", + worker="default", + ) + isolated_job.load_source_adapter = lambda *x, **y: None + isolated_job.load_target_adapter = lambda *x, **y: None + isolated_job.run(dryrun=False, memory_profiling=False) + self.assertFalse(isolated_job.sync.dry_run) + mock_execute_sync.assert_called() + + def test_job_dryrun_true(self): + """Test the job is ran in dryrun mode.""" + with patch.object(DataSyncBaseJob, "execute_sync") as mock_execute_sync: + isolated_job = DataSyncBaseJob() + + isolated_job.job_result = JobResult.objects.create( + name="fake job", + task_name="fake job", + worker="default", + ) + isolated_job.load_source_adapter = lambda *x, **y: None + isolated_job.load_target_adapter = lambda *x, **y: None + isolated_job.run(dryrun=True, memory_profiling=False) + self.assertTrue(isolated_job.sync.dry_run) + mock_execute_sync.assert_not_called() + + @patch("tracemalloc.start") + def test_job_memory_profiling_true(self, mock_malloc_start): + """Test the job is ran in dryrun mode.""" + self.job.run(dryrun=False, memory_profiling=True) + mock_malloc_start.assert_called() + + @patch("tracemalloc.start") + def test_job_memory_profiling_false(self, mock_malloc_start): + """Test the job is ran in dryrun mode.""" + self.job.run(dryrun=False, memory_profiling=False) + mock_malloc_start.assert_not_called() + def test_calculate_diff(self): """Test calculate_diff() method.""" self.job.sync = Mock() diff --git a/nautobot_ssot/urls.py b/nautobot_ssot/urls.py index ee40bc5b6..9f92a3840 100644 --- a/nautobot_ssot/urls.py +++ b/nautobot_ssot/urls.py @@ -1,6 +1,8 @@ """Django urlpatterns declaration for nautobot_ssot app.""" +from django.templatetags.static import static from django.urls import path +from django.views.generic import 
RedirectView from . import views from .integrations.utils import each_enabled_integration_module @@ -17,6 +19,7 @@ path("history//logs/", views.SyncLogEntriesView.as_view(), name="sync_logentries"), path("logs/", views.SyncLogEntryListView.as_view(), name="synclogentry_list"), path("config/", views.SSOTConfigView.as_view(), name="config"), + path("docs/", RedirectView.as_view(url=static("nautobot_ssot/docs/index.html")), name="docs"), ] diff --git a/nautobot_ssot/utils.py b/nautobot_ssot/utils.py index 84ea0554c..b0333b455 100644 --- a/nautobot_ssot/utils.py +++ b/nautobot_ssot/utils.py @@ -4,7 +4,7 @@ from nautobot.dcim.models import Controller, ControllerManagedDeviceGroup from nautobot.extras.choices import SecretsGroupAccessTypeChoices, SecretsGroupSecretTypeChoices -from nautobot.extras.models import SecretsGroup +from nautobot.extras.models import CustomField, SecretsGroup logger = logging.getLogger("nautobot.ssot") @@ -38,3 +38,13 @@ def verify_controller_managed_device_group(controller: Controller) -> Controller return ControllerManagedDeviceGroup.objects.get_or_create( controller=controller, defaults={"name": f"{controller.name} Managed Devices"} )[0] + + +def create_or_update_custom_field(key, field_type, label): + """Create or update a custom field object.""" + cf_dict = { + "type": field_type, + "key": key, + "label": label, + } + return CustomField.objects.update_or_create(key=cf_dict["key"], defaults=cf_dict) diff --git a/poetry.lock b/poetry.lock index b68480dca..57f5a3020 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,141 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. + +[[package]] +name = "aiohappyeyeballs" +version = "2.4.3" +description = "Happy Eyeballs for asyncio" +optional = true +python-versions = ">=3.8" +files = [ + {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, + {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, +] + +[[package]] +name = "aiohttp" +version = "3.10.10" +description = "Async http client/server framework (asyncio)" +optional = true +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.10.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be7443669ae9c016b71f402e43208e13ddf00912f47f623ee5994e12fc7d4b3f"}, + {file = "aiohttp-3.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b06b7843929e41a94ea09eb1ce3927865387e3e23ebe108e0d0d09b08d25be9"}, + {file = "aiohttp-3.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:333cf6cf8e65f6a1e06e9eb3e643a0c515bb850d470902274239fea02033e9a8"}, + {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:274cfa632350225ce3fdeb318c23b4a10ec25c0e2c880eff951a3842cf358ac1"}, + {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9e5e4a85bdb56d224f412d9c98ae4cbd032cc4f3161818f692cd81766eee65a"}, + {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b606353da03edcc71130b52388d25f9a30a126e04caef1fd637e31683033abd"}, + {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab5a5a0c7a7991d90446a198689c0535be89bbd6b410a1f9a66688f0880ec026"}, + {file = 
"aiohttp-3.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578a4b875af3e0daaf1ac6fa983d93e0bbfec3ead753b6d6f33d467100cdc67b"}, + {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8105fd8a890df77b76dd3054cddf01a879fc13e8af576805d667e0fa0224c35d"}, + {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3bcd391d083f636c06a68715e69467963d1f9600f85ef556ea82e9ef25f043f7"}, + {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fbc6264158392bad9df19537e872d476f7c57adf718944cc1e4495cbabf38e2a"}, + {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e48d5021a84d341bcaf95c8460b152cfbad770d28e5fe14a768988c461b821bc"}, + {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2609e9ab08474702cc67b7702dbb8a80e392c54613ebe80db7e8dbdb79837c68"}, + {file = "aiohttp-3.10.10-cp310-cp310-win32.whl", hash = "sha256:84afcdea18eda514c25bc68b9af2a2b1adea7c08899175a51fe7c4fb6d551257"}, + {file = "aiohttp-3.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:9c72109213eb9d3874f7ac8c0c5fa90e072d678e117d9061c06e30c85b4cf0e6"}, + {file = "aiohttp-3.10.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c30a0eafc89d28e7f959281b58198a9fa5e99405f716c0289b7892ca345fe45f"}, + {file = "aiohttp-3.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:258c5dd01afc10015866114e210fb7365f0d02d9d059c3c3415382ab633fcbcb"}, + {file = "aiohttp-3.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:15ecd889a709b0080f02721255b3f80bb261c2293d3c748151274dfea93ac871"}, + {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3935f82f6f4a3820270842e90456ebad3af15810cf65932bd24da4463bc0a4c"}, + {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:413251f6fcf552a33c981c4709a6bba37b12710982fec8e558ae944bfb2abd38"}, + {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1720b4f14c78a3089562b8875b53e36b51c97c51adc53325a69b79b4b48ebcb"}, + {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:679abe5d3858b33c2cf74faec299fda60ea9de62916e8b67e625d65bf069a3b7"}, + {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79019094f87c9fb44f8d769e41dbb664d6e8fcfd62f665ccce36762deaa0e911"}, + {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2fb38c2ed905a2582948e2de560675e9dfbee94c6d5ccdb1301c6d0a5bf092"}, + {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a3f00003de6eba42d6e94fabb4125600d6e484846dbf90ea8e48a800430cc142"}, + {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1bbb122c557a16fafc10354b9d99ebf2f2808a660d78202f10ba9d50786384b9"}, + {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:30ca7c3b94708a9d7ae76ff281b2f47d8eaf2579cd05971b5dc681db8caac6e1"}, + {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:df9270660711670e68803107d55c2b5949c2e0f2e4896da176e1ecfc068b974a"}, + {file = "aiohttp-3.10.10-cp311-cp311-win32.whl", hash = "sha256:aafc8ee9b742ce75044ae9a4d3e60e3d918d15a4c2e08a6c3c3e38fa59b92d94"}, + {file = "aiohttp-3.10.10-cp311-cp311-win_amd64.whl", hash = 
"sha256:362f641f9071e5f3ee6f8e7d37d5ed0d95aae656adf4ef578313ee585b585959"}, + {file = "aiohttp-3.10.10-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9294bbb581f92770e6ed5c19559e1e99255e4ca604a22c5c6397b2f9dd3ee42c"}, + {file = "aiohttp-3.10.10-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8fa23fe62c436ccf23ff930149c047f060c7126eae3ccea005f0483f27b2e28"}, + {file = "aiohttp-3.10.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c6a5b8c7926ba5d8545c7dd22961a107526562da31a7a32fa2456baf040939f"}, + {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:007ec22fbc573e5eb2fb7dec4198ef8f6bf2fe4ce20020798b2eb5d0abda6138"}, + {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9627cc1a10c8c409b5822a92d57a77f383b554463d1884008e051c32ab1b3742"}, + {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50edbcad60d8f0e3eccc68da67f37268b5144ecc34d59f27a02f9611c1d4eec7"}, + {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a45d85cf20b5e0d0aa5a8dca27cce8eddef3292bc29d72dcad1641f4ed50aa16"}, + {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b00807e2605f16e1e198f33a53ce3c4523114059b0c09c337209ae55e3823a8"}, + {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f2d4324a98062be0525d16f768a03e0bbb3b9fe301ceee99611dc9a7953124e6"}, + {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:438cd072f75bb6612f2aca29f8bd7cdf6e35e8f160bc312e49fbecab77c99e3a"}, + {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:baa42524a82f75303f714108fea528ccacf0386af429b69fff141ffef1c534f9"}, + {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a7d8d14fe962153fc681f6366bdec33d4356f98a3e3567782aac1b6e0e40109a"}, + {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c1277cd707c465cd09572a774559a3cc7c7a28802eb3a2a9472588f062097205"}, + {file = "aiohttp-3.10.10-cp312-cp312-win32.whl", hash = "sha256:59bb3c54aa420521dc4ce3cc2c3fe2ad82adf7b09403fa1f48ae45c0cbde6628"}, + {file = "aiohttp-3.10.10-cp312-cp312-win_amd64.whl", hash = "sha256:0e1b370d8007c4ae31ee6db7f9a2fe801a42b146cec80a86766e7ad5c4a259cf"}, + {file = "aiohttp-3.10.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ad7593bb24b2ab09e65e8a1d385606f0f47c65b5a2ae6c551db67d6653e78c28"}, + {file = "aiohttp-3.10.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1eb89d3d29adaf533588f209768a9c02e44e4baf832b08118749c5fad191781d"}, + {file = "aiohttp-3.10.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3fe407bf93533a6fa82dece0e74dbcaaf5d684e5a51862887f9eaebe6372cd79"}, + {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aed5155f819873d23520919e16703fc8925e509abbb1a1491b0087d1cd969e"}, + {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f05e9727ce409358baa615dbeb9b969db94324a79b5a5cea45d39bdb01d82e6"}, + {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dffb610a30d643983aeb185ce134f97f290f8935f0abccdd32c77bed9388b42"}, + {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:aa6658732517ddabe22c9036479eabce6036655ba87a0224c612e1ae6af2087e"}, + {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:741a46d58677d8c733175d7e5aa618d277cd9d880301a380fd296975a9cdd7bc"}, + {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e00e3505cd80440f6c98c6d69269dcc2a119f86ad0a9fd70bccc59504bebd68a"}, + {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ffe595f10566f8276b76dc3a11ae4bb7eba1aac8ddd75811736a15b0d5311414"}, + {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdfcf6443637c148c4e1a20c48c566aa694fa5e288d34b20fcdc58507882fed3"}, + {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d183cf9c797a5291e8301790ed6d053480ed94070637bfaad914dd38b0981f67"}, + {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:77abf6665ae54000b98b3c742bc6ea1d1fb31c394bcabf8b5d2c1ac3ebfe7f3b"}, + {file = "aiohttp-3.10.10-cp313-cp313-win32.whl", hash = "sha256:4470c73c12cd9109db8277287d11f9dd98f77fc54155fc71a7738a83ffcc8ea8"}, + {file = "aiohttp-3.10.10-cp313-cp313-win_amd64.whl", hash = "sha256:486f7aabfa292719a2753c016cc3a8f8172965cabb3ea2e7f7436c7f5a22a151"}, + {file = "aiohttp-3.10.10-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1b66ccafef7336a1e1f0e389901f60c1d920102315a56df85e49552308fc0486"}, + {file = "aiohttp-3.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:acd48d5b80ee80f9432a165c0ac8cbf9253eaddb6113269a5e18699b33958dbb"}, + {file = "aiohttp-3.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3455522392fb15ff549d92fbf4b73b559d5e43dc522588f7eb3e54c3f38beee7"}, + {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c3b868724137f713a38376fef8120c166d1eadd50da1855c112fe97954aed8"}, + {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:da1dee8948d2137bb51fbb8a53cce6b1bcc86003c6b42565f008438b806cccd8"}, + {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5ce2ce7c997e1971b7184ee37deb6ea9922ef5163c6ee5aa3c274b05f9e12fa"}, + {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28529e08fde6f12eba8677f5a8608500ed33c086f974de68cc65ab218713a59d"}, + {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7db54c7914cc99d901d93a34704833568d86c20925b2762f9fa779f9cd2e70f"}, + {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03a42ac7895406220124c88911ebee31ba8b2d24c98507f4a8bf826b2937c7f2"}, + {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7e338c0523d024fad378b376a79faff37fafb3c001872a618cde1d322400a572"}, + {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:038f514fe39e235e9fef6717fbf944057bfa24f9b3db9ee551a7ecf584b5b480"}, + {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:64f6c17757251e2b8d885d728b6433d9d970573586a78b78ba8929b0f41d045a"}, + {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:93429602396f3383a797a2a70e5f1de5df8e35535d7806c9f91df06f297e109b"}, + {file = "aiohttp-3.10.10-cp38-cp38-win32.whl", hash = "sha256:c823bc3971c44ab93e611ab1a46b1eafeae474c0c844aff4b7474287b75fe49c"}, + {file = "aiohttp-3.10.10-cp38-cp38-win_amd64.whl", hash = 
"sha256:54ca74df1be3c7ca1cf7f4c971c79c2daf48d9aa65dea1a662ae18926f5bc8ce"}, + {file = "aiohttp-3.10.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01948b1d570f83ee7bbf5a60ea2375a89dfb09fd419170e7f5af029510033d24"}, + {file = "aiohttp-3.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9fc1500fd2a952c5c8e3b29aaf7e3cc6e27e9cfc0a8819b3bce48cc1b849e4cc"}, + {file = "aiohttp-3.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f614ab0c76397661b90b6851a030004dac502e48260ea10f2441abd2207fbcc7"}, + {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00819de9e45d42584bed046314c40ea7e9aea95411b38971082cad449392b08c"}, + {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05646ebe6b94cc93407b3bf34b9eb26c20722384d068eb7339de802154d61bc5"}, + {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:998f3bd3cfc95e9424a6acd7840cbdd39e45bc09ef87533c006f94ac47296090"}, + {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9010c31cd6fa59438da4e58a7f19e4753f7f264300cd152e7f90d4602449762"}, + {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ea7ffc6d6d6f8a11e6f40091a1040995cdff02cfc9ba4c2f30a516cb2633554"}, + {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ef9c33cc5cbca35808f6c74be11eb7f5f6b14d2311be84a15b594bd3e58b5527"}, + {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ce0cdc074d540265bfeb31336e678b4e37316849d13b308607efa527e981f5c2"}, + {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:597a079284b7ee65ee102bc3a6ea226a37d2b96d0418cc9047490f231dc09fe8"}, + {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7789050d9e5d0c309c706953e5e8876e38662d57d45f936902e176d19f1c58ab"}, + {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e7f8b04d83483577fd9200461b057c9f14ced334dcb053090cea1da9c8321a91"}, + {file = "aiohttp-3.10.10-cp39-cp39-win32.whl", hash = "sha256:c02a30b904282777d872266b87b20ed8cc0d1501855e27f831320f471d54d983"}, + {file = "aiohttp-3.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:edfe3341033a6b53a5c522c802deb2079eee5cbfbb0af032a55064bd65c73a23"}, + {file = "aiohttp-3.10.10.tar.gz", hash = "sha256:0631dd7c9f0822cc61c88586ca76d5b5ada26538097d0f1df510b082bad3411a"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.12.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = true +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" [[package]] name = "alabaster" @@ -52,13 +189,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} [[package]] name = "anyio" -version = "4.4.0" +version = "4.5.2" description = "High level 
compatibility layer for multiple asynchronous event loop implementations" optional = true python-versions = ">=3.8" files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, + {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, ] [package.dependencies] @@ -68,9 +205,9 @@ sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "appnope" @@ -184,18 +321,18 @@ tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "autopep8" -version = "2.0.0" +version = "2.3.1" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "autopep8-2.0.0-py2.py3-none-any.whl", hash = "sha256:ad924b42c2e27a1ac58e432166cc4588f5b80747de02d0d35b1ecbd3e7d57207"}, - {file = "autopep8-2.0.0.tar.gz", hash = "sha256:8b1659c7f003e693199f52caffdc06585bb0716900bbc6a7442fd931d658c077"}, + {file = "autopep8-2.3.1-py2.py3-none-any.whl", hash = "sha256:a203fe0fcad7939987422140ab17a930f684763bf7335bdb6709991dd7ef6c2d"}, + {file = "autopep8-2.3.1.tar.gz", hash = "sha256:8d6c87eba648fdcfc83e29b788910b8643171c395d9c4bcf115ece035b9c9dda"}, ] [package.dependencies] -pycodestyle = ">=2.9.1" -tomli = "*" +pycodestyle = ">=2.12.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} [[package]] name = "babel" @@ -258,13 +395,13 @@ tzdata = ["tzdata"] [[package]] name = "billiard" -version = "4.2.0" +version = "4.2.1" description = "Python multiprocessing fork with improvements and bugfixes" optional = false python-versions = ">=3.7" files = [ - {file = "billiard-4.2.0-py3-none-any.whl", hash = "sha256:07aa978b308f334ff8282bd4a746e681b3513db5c9a514cbdd810cbbdc19714d"}, - {file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"}, + {file = "billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb"}, + {file = "billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f"}, ] [[package]] @@ -336,89 +473,89 @@ zstd = ["zstandard (==0.22.0)"] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "cffi" -version = "1.17.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" files = [ - {file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, - {file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, - {file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, - {file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, - {file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, - {file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, - {file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, - {file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, - {file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, - {file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, - {file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, - {file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, - {file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, - {file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, - {file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, - {file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, - {file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, - {file = 
"cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, - {file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, - {file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, - {file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, - {file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, - {file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, - {file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, - {file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, - {file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, - {file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, - {file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, - {file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, - {file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, - {file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, - {file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file 
= "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -426,101 +563,116 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - 
{file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, 
+ {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -796,13 +948,13 @@ toml = ["tomli"] [[package]] name = "cron-descriptor" -version = "1.4.3" +version = "1.4.5" description = "A Python library that converts cron expressions into human readable strings." optional = false python-versions = "*" files = [ - {file = "cron_descriptor-1.4.3-py3-none-any.whl", hash = "sha256:a67ba21804983b1427ed7f3e1ec27ee77bf24c652b0430239c268c5ddfbf9dc0"}, - {file = "cron_descriptor-1.4.3.tar.gz", hash = "sha256:7b1a00d7d25d6ae6896c0da4457e790b98cba778398a3d48e341e5e0d33f0488"}, + {file = "cron_descriptor-1.4.5-py3-none-any.whl", hash = "sha256:736b3ae9d1a99bc3dbfc5b55b5e6e7c12031e7ba5de716625772f8b02dcd6013"}, + {file = "cron_descriptor-1.4.5.tar.gz", hash = "sha256:f51ce4ffc1d1f2816939add8524f206c376a42c87a5fca3091ce26725b3b1bca"}, ] [package.extras] @@ -957,13 +1109,13 @@ redis = ["redis (>=4.3,<5.0)"] [[package]] name = "dill" -version = "0.3.8" +version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" files = [ - {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, - {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, ] [package.extras] @@ -972,13 +1124,13 @@ profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "django" -version = "4.2.15" +version = "4.2.16" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
optional = false python-versions = ">=3.8" files = [ - {file = "Django-4.2.15-py3-none-any.whl", hash = "sha256:61ee4a130efb8c451ef3467c67ca99fdce400fedd768634efc86a68c18d80d30"}, - {file = "Django-4.2.15.tar.gz", hash = "sha256:c77f926b81129493961e19c0e02188f8d07c112a1162df69bfab178ae447f94a"}, + {file = "Django-4.2.16-py3-none-any.whl", hash = "sha256:1ddc333a16fc139fd253035a1606bb24261951bbc3a6ca256717fa06cc41a898"}, + {file = "Django-4.2.16.tar.gz", hash = "sha256:6f1616c2786c408ce86ab7e10f792b8f15742f7b7b7460243929cb371e7f1dad"}, ] [package.dependencies] @@ -1251,6 +1403,7 @@ files = [ [package.dependencies] asgiref = ">=3.6.0" +celery = {version = ">=5.1", optional = true, markers = "extra == \"celery\""} django = ">=4.2" django-ipware = ">=6.0.2" structlog = ">=21.4.0" @@ -1357,13 +1510,13 @@ django = ">=4.2" [[package]] name = "dnacentersdk" -version = "2.7.3" +version = "2.7.4" description = "Cisco DNA Center Platform SDK" optional = true python-versions = "<4.0,>=3.8" files = [ - {file = "dnacentersdk-2.7.3-py3-none-any.whl", hash = "sha256:292fcf65638843977658024fa41888cb736a2ef58a601ac167e9bfdca97b4fcd"}, - {file = "dnacentersdk-2.7.3.tar.gz", hash = "sha256:e3dd541f580a1f33e22e9a402d67629467b530c54c3cdb5356aa4744ee71b048"}, + {file = "dnacentersdk-2.7.4-py3-none-any.whl", hash = "sha256:befab14a8a7a2dc3e6f4a51e66ec3bf3937f97dbad091098670d1a0620eb87a3"}, + {file = "dnacentersdk-2.7.4.tar.gz", hash = "sha256:91cbe7c7664afd59afca9150819cf45c5b6d0705e218dadf21fee1882a2f0802"}, ] [package.dependencies] @@ -1488,13 +1641,13 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "2.0.1" +version = "2.1.0" description = "Get the currently executing AST node of a frame, and other information" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, - {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, ] [package.extras] @@ -1516,53 +1669,59 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "fonttools" -version = "4.53.1" +version = "4.54.1" description = "Tools to manipulate font files" optional = true python-versions = ">=3.8" files = [ - {file = "fonttools-4.53.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0679a30b59d74b6242909945429dbddb08496935b82f91ea9bf6ad240ec23397"}, - {file = "fonttools-4.53.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8bf06b94694251861ba7fdeea15c8ec0967f84c3d4143ae9daf42bbc7717fe3"}, - {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b96cd370a61f4d083c9c0053bf634279b094308d52fdc2dd9a22d8372fdd590d"}, - {file = "fonttools-4.53.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1c7c5aa18dd3b17995898b4a9b5929d69ef6ae2af5b96d585ff4005033d82f0"}, - {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e013aae589c1c12505da64a7d8d023e584987e51e62006e1bb30d72f26522c41"}, - {file = "fonttools-4.53.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:9efd176f874cb6402e607e4cc9b4a9cd584d82fc34a4b0c811970b32ba62501f"}, - {file = "fonttools-4.53.1-cp310-cp310-win32.whl", hash = "sha256:c8696544c964500aa9439efb6761947393b70b17ef4e82d73277413f291260a4"}, - {file = "fonttools-4.53.1-cp310-cp310-win_amd64.whl", hash = "sha256:8959a59de5af6d2bec27489e98ef25a397cfa1774b375d5787509c06659b3671"}, - {file = "fonttools-4.53.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da33440b1413bad53a8674393c5d29ce64d8c1a15ef8a77c642ffd900d07bfe1"}, - {file = "fonttools-4.53.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ff7e5e9bad94e3a70c5cd2fa27f20b9bb9385e10cddab567b85ce5d306ea923"}, - {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6e7170d675d12eac12ad1a981d90f118c06cf680b42a2d74c6c931e54b50719"}, - {file = "fonttools-4.53.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bee32ea8765e859670c4447b0817514ca79054463b6b79784b08a8df3a4d78e3"}, - {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e08f572625a1ee682115223eabebc4c6a2035a6917eac6f60350aba297ccadb"}, - {file = "fonttools-4.53.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b21952c092ffd827504de7e66b62aba26fdb5f9d1e435c52477e6486e9d128b2"}, - {file = "fonttools-4.53.1-cp311-cp311-win32.whl", hash = "sha256:9dfdae43b7996af46ff9da520998a32b105c7f098aeea06b2226b30e74fbba88"}, - {file = "fonttools-4.53.1-cp311-cp311-win_amd64.whl", hash = "sha256:d4d0096cb1ac7a77b3b41cd78c9b6bc4a400550e21dc7a92f2b5ab53ed74eb02"}, - {file = "fonttools-4.53.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d92d3c2a1b39631a6131c2fa25b5406855f97969b068e7e08413325bc0afba58"}, - {file = "fonttools-4.53.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3c8ebafbee8d9002bd8f1195d09ed2bd9ff134ddec37ee8f6a6375e6a4f0e8"}, - {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f029c095ad66c425b0ee85553d0dc326d45d7059dbc227330fc29b43e8ba60"}, - {file = "fonttools-4.53.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10f5e6c3510b79ea27bb1ebfcc67048cde9ec67afa87c7dd7efa5c700491ac7f"}, - {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f677ce218976496a587ab17140da141557beb91d2a5c1a14212c994093f2eae2"}, - {file = "fonttools-4.53.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9e6ceba2a01b448e36754983d376064730690401da1dd104ddb543519470a15f"}, - {file = "fonttools-4.53.1-cp312-cp312-win32.whl", hash = "sha256:791b31ebbc05197d7aa096bbc7bd76d591f05905d2fd908bf103af4488e60670"}, - {file = "fonttools-4.53.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ed170b5e17da0264b9f6fae86073be3db15fa1bd74061c8331022bca6d09bab"}, - {file = "fonttools-4.53.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c818c058404eb2bba05e728d38049438afd649e3c409796723dfc17cd3f08749"}, - {file = "fonttools-4.53.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:651390c3b26b0c7d1f4407cad281ee7a5a85a31a110cbac5269de72a51551ba2"}, - {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54f1bba2f655924c1138bbc7fa91abd61f45c68bd65ab5ed985942712864bbb"}, - {file = "fonttools-4.53.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9cd19cf4fe0595ebdd1d4915882b9440c3a6d30b008f3cc7587c1da7b95be5f"}, - {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:2af40ae9cdcb204fc1d8f26b190aa16534fcd4f0df756268df674a270eab575d"}, - {file = "fonttools-4.53.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:35250099b0cfb32d799fb5d6c651220a642fe2e3c7d2560490e6f1d3f9ae9169"}, - {file = "fonttools-4.53.1-cp38-cp38-win32.whl", hash = "sha256:f08df60fbd8d289152079a65da4e66a447efc1d5d5a4d3f299cdd39e3b2e4a7d"}, - {file = "fonttools-4.53.1-cp38-cp38-win_amd64.whl", hash = "sha256:7b6b35e52ddc8fb0db562133894e6ef5b4e54e1283dff606fda3eed938c36fc8"}, - {file = "fonttools-4.53.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75a157d8d26c06e64ace9df037ee93a4938a4606a38cb7ffaf6635e60e253b7a"}, - {file = "fonttools-4.53.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4824c198f714ab5559c5be10fd1adf876712aa7989882a4ec887bf1ef3e00e31"}, - {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:becc5d7cb89c7b7afa8321b6bb3dbee0eec2b57855c90b3e9bf5fb816671fa7c"}, - {file = "fonttools-4.53.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84ec3fb43befb54be490147b4a922b5314e16372a643004f182babee9f9c3407"}, - {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:73379d3ffdeecb376640cd8ed03e9d2d0e568c9d1a4e9b16504a834ebadc2dfb"}, - {file = "fonttools-4.53.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:02569e9a810f9d11f4ae82c391ebc6fb5730d95a0657d24d754ed7763fb2d122"}, - {file = "fonttools-4.53.1-cp39-cp39-win32.whl", hash = "sha256:aae7bd54187e8bf7fd69f8ab87b2885253d3575163ad4d669a262fe97f0136cb"}, - {file = "fonttools-4.53.1-cp39-cp39-win_amd64.whl", hash = "sha256:e5b708073ea3d684235648786f5f6153a48dc8762cdfe5563c57e80787c29fbb"}, - {file = "fonttools-4.53.1-py3-none-any.whl", hash = "sha256:f1f8758a2ad110bd6432203a344269f445a2907dc24ef6bccfd0ac4e14e0d71d"}, - {file = "fonttools-4.53.1.tar.gz", hash = "sha256:e128778a8e9bc11159ce5447f76766cefbd876f44bd79aff030287254e4752c4"}, + {file = "fonttools-4.54.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ed7ee041ff7b34cc62f07545e55e1468808691dddfd315d51dd82a6b37ddef2"}, + {file = "fonttools-4.54.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41bb0b250c8132b2fcac148e2e9198e62ff06f3cc472065dff839327945c5882"}, + {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7965af9b67dd546e52afcf2e38641b5be956d68c425bef2158e95af11d229f10"}, + {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278913a168f90d53378c20c23b80f4e599dca62fbffae4cc620c8eed476b723e"}, + {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0e88e3018ac809b9662615072dcd6b84dca4c2d991c6d66e1970a112503bba7e"}, + {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4817f0031206e637d1e685251ac61be64d1adef111060df84fdcbc6ab6c44"}, + {file = "fonttools-4.54.1-cp310-cp310-win32.whl", hash = "sha256:7e3b7d44e18c085fd8c16dcc6f1ad6c61b71ff463636fcb13df7b1b818bd0c02"}, + {file = "fonttools-4.54.1-cp310-cp310-win_amd64.whl", hash = "sha256:dd9cc95b8d6e27d01e1e1f1fae8559ef3c02c76317da650a19047f249acd519d"}, + {file = "fonttools-4.54.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5419771b64248484299fa77689d4f3aeed643ea6630b2ea750eeab219588ba20"}, + {file = "fonttools-4.54.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:301540e89cf4ce89d462eb23a89464fef50915255ece765d10eee8b2bf9d75b2"}, + {file = 
"fonttools-4.54.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ae5091547e74e7efecc3cbf8e75200bc92daaeb88e5433c5e3e95ea8ce5aa7"}, + {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82834962b3d7c5ca98cb56001c33cf20eb110ecf442725dc5fdf36d16ed1ab07"}, + {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d26732ae002cc3d2ecab04897bb02ae3f11f06dd7575d1df46acd2f7c012a8d8"}, + {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58974b4987b2a71ee08ade1e7f47f410c367cdfc5a94fabd599c88165f56213a"}, + {file = "fonttools-4.54.1-cp311-cp311-win32.whl", hash = "sha256:ab774fa225238986218a463f3fe151e04d8c25d7de09df7f0f5fce27b1243dbc"}, + {file = "fonttools-4.54.1-cp311-cp311-win_amd64.whl", hash = "sha256:07e005dc454eee1cc60105d6a29593459a06321c21897f769a281ff2d08939f6"}, + {file = "fonttools-4.54.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:54471032f7cb5fca694b5f1a0aaeba4af6e10ae989df408e0216f7fd6cdc405d"}, + {file = "fonttools-4.54.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fa92cb248e573daab8d032919623cc309c005086d743afb014c836636166f08"}, + {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a911591200114969befa7f2cb74ac148bce5a91df5645443371aba6d222e263"}, + {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93d458c8a6a354dc8b48fc78d66d2a8a90b941f7fec30e94c7ad9982b1fa6bab"}, + {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5eb2474a7c5be8a5331146758debb2669bf5635c021aee00fd7c353558fc659d"}, + {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c9c563351ddc230725c4bdf7d9e1e92cbe6ae8553942bd1fb2b2ff0884e8b714"}, + {file = "fonttools-4.54.1-cp312-cp312-win32.whl", hash = "sha256:fdb062893fd6d47b527d39346e0c5578b7957dcea6d6a3b6794569370013d9ac"}, + {file = "fonttools-4.54.1-cp312-cp312-win_amd64.whl", hash = "sha256:e4564cf40cebcb53f3dc825e85910bf54835e8a8b6880d59e5159f0f325e637e"}, + {file = "fonttools-4.54.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6e37561751b017cf5c40fce0d90fd9e8274716de327ec4ffb0df957160be3bff"}, + {file = "fonttools-4.54.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:357cacb988a18aace66e5e55fe1247f2ee706e01debc4b1a20d77400354cddeb"}, + {file = "fonttools-4.54.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e953cc0bddc2beaf3a3c3b5dd9ab7554677da72dfaf46951e193c9653e515a"}, + {file = "fonttools-4.54.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58d29b9a294573d8319f16f2f79e42428ba9b6480442fa1836e4eb89c4d9d61c"}, + {file = "fonttools-4.54.1-cp313-cp313-win32.whl", hash = "sha256:9ef1b167e22709b46bf8168368b7b5d3efeaaa746c6d39661c1b4405b6352e58"}, + {file = "fonttools-4.54.1-cp313-cp313-win_amd64.whl", hash = "sha256:262705b1663f18c04250bd1242b0515d3bbae177bee7752be67c979b7d47f43d"}, + {file = "fonttools-4.54.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ed2f80ca07025551636c555dec2b755dd005e2ea8fbeb99fc5cdff319b70b23b"}, + {file = "fonttools-4.54.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dc080e5a1c3b2656caff2ac2633d009b3a9ff7b5e93d0452f40cd76d3da3b3c"}, + {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1d152d1be65652fc65e695e5619e0aa0982295a95a9b29b52b85775243c06556"}, + {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8583e563df41fdecef31b793b4dd3af8a9caa03397be648945ad32717a92885b"}, + {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0d1d353ef198c422515a3e974a1e8d5b304cd54a4c2eebcae708e37cd9eeffb1"}, + {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fda582236fee135d4daeca056c8c88ec5f6f6d88a004a79b84a02547c8f57386"}, + {file = "fonttools-4.54.1-cp38-cp38-win32.whl", hash = "sha256:e7d82b9e56716ed32574ee106cabca80992e6bbdcf25a88d97d21f73a0aae664"}, + {file = "fonttools-4.54.1-cp38-cp38-win_amd64.whl", hash = "sha256:ada215fd079e23e060157aab12eba0d66704316547f334eee9ff26f8c0d7b8ab"}, + {file = "fonttools-4.54.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5b8a096e649768c2f4233f947cf9737f8dbf8728b90e2771e2497c6e3d21d13"}, + {file = "fonttools-4.54.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e10d2e0a12e18f4e2dd031e1bf7c3d7017be5c8dbe524d07706179f355c5dac"}, + {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c32d7d4b0958600eac75eaf524b7b7cb68d3a8c196635252b7a2c30d80e986"}, + {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c39287f5c8f4a0c5a55daf9eaf9ccd223ea59eed3f6d467133cc727d7b943a55"}, + {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a7a310c6e0471602fe3bf8efaf193d396ea561486aeaa7adc1f132e02d30c4b9"}, + {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d3b659d1029946f4ff9b6183984578041b520ce0f8fb7078bb37ec7445806b33"}, + {file = "fonttools-4.54.1-cp39-cp39-win32.whl", hash = "sha256:e96bc94c8cda58f577277d4a71f51c8e2129b8b36fd05adece6320dd3d57de8a"}, + {file = "fonttools-4.54.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8a4b261c1ef91e7188a30571be6ad98d1c6d9fa2427244c545e2fa0a2494dd7"}, + {file = "fonttools-4.54.1-py3-none-any.whl", hash = "sha256:37cddd62d83dc4f72f7c3f3c2bcf2697e89a30efb152079896544a93907733bd"}, + {file = "fonttools-4.54.1.tar.gz", hash = "sha256:957f669d4922f92c171ba01bef7f29410668db09f6c02111e22b2bce446f3285"}, ] [package.extras] @@ -1579,6 +1738,92 @@ ufo = ["fs (>=2.2.0,<3)"] unicode = ["unicodedata2 (>=15.1.0)"] woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] +[[package]] +name = "frozenlist" +version = "1.4.1" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = true +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, + {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, + {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, + {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, + {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, + {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, + {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, + {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, + {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, + {file = 
"frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, + {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, + {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, + {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, + {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, + {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, + {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, + {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, + {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, + {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, + {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, + {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, + {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, + {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, + {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, + {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, + {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, + {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, + {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, + {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +] + [[package]] name = "ghp-import" version = "2.1.0" @@ -1748,61 +1993,70 @@ colorama = ">=0.4" [[package]] name = "grpcio" -version = "1.65.5" +version = "1.67.0" description = "HTTP/2-based RPC framework" optional = true python-versions = ">=3.8" files = [ - {file = "grpcio-1.65.5-cp310-cp310-linux_armv7l.whl", hash = "sha256:b67d450f1e008fedcd81e097a3a400a711d8be1a8b20f852a7b8a73fead50fe3"}, - {file = "grpcio-1.65.5-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a70a20eed87bba647a38bedd93b3ce7db64b3f0e8e0952315237f7f5ca97b02d"}, - {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:f79c87c114bf37adf408026b9e2e333fe9ff31dfc9648f6f80776c513145c813"}, - {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17f9fa2d947dbfaca01b3ab2c62eefa8240131fdc67b924eb42ce6032e3e5c1"}, - {file = "grpcio-1.65.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32d60e18ff7c34fe3f6db3d35ad5c6dc99f5b43ff3982cb26fad4174462d10b1"}, - {file = "grpcio-1.65.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fe6505376f5b00bb008e4e1418152e3ad3d954b629da286c7913ff3cfc0ff740"}, - {file = "grpcio-1.65.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:33158e56c6378063923c417e9fbdb28660b6e0e2835af42e67f5a7793f587af7"}, - {file = "grpcio-1.65.5-cp310-cp310-win32.whl", hash = "sha256:1cbc208edb9acf1cc339396a1a36b83796939be52f34e591c90292045b579fbf"}, - {file = "grpcio-1.65.5-cp310-cp310-win_amd64.whl", hash = "sha256:bc74f3f745c37e2c5685c9d2a2d5a94de00f286963f5213f763ae137bf4f2358"}, - {file = "grpcio-1.65.5-cp311-cp311-linux_armv7l.whl", hash = "sha256:3207ae60d07e5282c134b6e02f9271a2cb523c6d7a346c6315211fe2bf8d61ed"}, - {file = "grpcio-1.65.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a2f80510f99f82d4eb825849c486df703f50652cea21c189eacc2b84f2bde764"}, - {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:a80e9a5e3f93c54f5eb82a3825ea1fc4965b2fa0026db2abfecb139a5c4ecdf1"}, - {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2944390a496567de9e70418f3742b477d85d8ca065afa90432edc91b4bb8ad"}, - {file = "grpcio-1.65.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3655139d7be213c32c79ef6fb2367cae28e56ef68e39b1961c43214b457f257"}, - {file = "grpcio-1.65.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05f02d68fc720e085f061b704ee653b181e6d5abfe315daef085719728d3d1fd"}, - {file = "grpcio-1.65.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1c4caafe71aef4dabf53274bbf4affd6df651e9f80beedd6b8e08ff438ed3260"}, - {file = "grpcio-1.65.5-cp311-cp311-win32.whl", hash = "sha256:84c901cdec16a092099f251ef3360d15e29ef59772150fa261d94573612539b5"}, - {file = 
"grpcio-1.65.5-cp311-cp311-win_amd64.whl", hash = "sha256:11f8b16121768c1cb99d7dcb84e01510e60e6a206bf9123e134118802486f035"}, - {file = "grpcio-1.65.5-cp312-cp312-linux_armv7l.whl", hash = "sha256:ee6ed64a27588a2c94e8fa84fe8f3b5c89427d4d69c37690903d428ec61ca7e4"}, - {file = "grpcio-1.65.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:76991b7a6fb98630a3328839755181ce7c1aa2b1842aa085fd4198f0e5198960"}, - {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:89c00a18801b1ed9cc441e29b521c354725d4af38c127981f2c950c796a09b6e"}, - {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:078038e150a897e5e402ed3d57f1d31ebf604cbed80f595bd281b5da40762a92"}, - {file = "grpcio-1.65.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97962720489ef31b5ad8a916e22bc31bba3664e063fb9f6702dce056d4aa61b"}, - {file = "grpcio-1.65.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b8270b15b99781461b244f5c81d5c2bc9696ab9189fb5ff86c841417fb3b39fe"}, - {file = "grpcio-1.65.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8e5c4c15ac3fe1eb68e46bc51e66ad29be887479f231f8237cf8416058bf0cc1"}, - {file = "grpcio-1.65.5-cp312-cp312-win32.whl", hash = "sha256:f5b5970341359341d0e4c789da7568264b2a89cd976c05ea476036852b5950cd"}, - {file = "grpcio-1.65.5-cp312-cp312-win_amd64.whl", hash = "sha256:238a625f391a1b9f5f069bdc5930f4fd71b74426bea52196fc7b83f51fa97d34"}, - {file = "grpcio-1.65.5-cp38-cp38-linux_armv7l.whl", hash = "sha256:6c4e62bcf297a1568f627f39576dbfc27f1e5338a691c6dd5dd6b3979da51d1c"}, - {file = "grpcio-1.65.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d7df567b67d16d4177835a68d3f767bbcbad04da9dfb52cbd19171f430c898bd"}, - {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:b7ca419f1462390851eec395b2089aad1e49546b52d4e2c972ceb76da69b10f8"}, - {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa36dd8496d3af0d40165252a669fa4f6fd2db4b4026b9a9411cbf060b9d6a15"}, - {file = "grpcio-1.65.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a101696f9ece90a0829988ff72f1b1ea2358f3df035bdf6d675dd8b60c2c0894"}, - {file = "grpcio-1.65.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2a6d8169812932feac514b420daffae8ab8e36f90f3122b94ae767e633296b17"}, - {file = "grpcio-1.65.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:47d0aaaab82823f0aa6adea5184350b46e2252e13a42a942db84da5b733f2e05"}, - {file = "grpcio-1.65.5-cp38-cp38-win32.whl", hash = "sha256:85ae8f8517d5bcc21fb07dbf791e94ed84cc28f84c903cdc2bd7eaeb437c8f45"}, - {file = "grpcio-1.65.5-cp38-cp38-win_amd64.whl", hash = "sha256:770bd4bd721961f6dd8049bc27338564ba8739913f77c0f381a9815e465ff965"}, - {file = "grpcio-1.65.5-cp39-cp39-linux_armv7l.whl", hash = "sha256:ab5ec837d8cee8dbce9ef6386125f119b231e4333cc6b6d57b6c5c7c82a72331"}, - {file = "grpcio-1.65.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cabd706183ee08d8026a015af5819a0b3a8959bdc9d1f6fdacd1810f09200f2a"}, - {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:ec71fc5b39821ad7d80db7473c8f8c2910f3382f0ddadfbcfc2c6c437107eb67"}, - {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a9e35bcb045e39d7cac30464c285389b9a816ac2067e4884ad2c02e709ef8e"}, - {file = "grpcio-1.65.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d750e9330eb14236ca11b78d0c494eed13d6a95eb55472298f0e547c165ee324"}, - {file = 
"grpcio-1.65.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2b91ce647b6307f25650872454a4d02a2801f26a475f90d0b91ed8110baae589"}, - {file = "grpcio-1.65.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8da58ff80bc4556cf29bc03f5fff1f03b8387d6aaa7b852af9eb65b2cf833be4"}, - {file = "grpcio-1.65.5-cp39-cp39-win32.whl", hash = "sha256:7a412959aa5f08c5ac04aa7b7c3c041f5e4298cadd4fcc2acff195b56d185ebc"}, - {file = "grpcio-1.65.5-cp39-cp39-win_amd64.whl", hash = "sha256:55714ea852396ec9568f45f487639945ab674de83c12bea19d5ddbc3ae41ada3"}, - {file = "grpcio-1.65.5.tar.gz", hash = "sha256:ec6f219fb5d677a522b0deaf43cea6697b16f338cb68d009e30930c4aa0d2209"}, + {file = "grpcio-1.67.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:bd79929b3bb96b54df1296cd3bf4d2b770bd1df6c2bdf549b49bab286b925cdc"}, + {file = "grpcio-1.67.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:16724ffc956ea42967f5758c2f043faef43cb7e48a51948ab593570570d1e68b"}, + {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:2b7183c80b602b0ad816315d66f2fb7887614ead950416d60913a9a71c12560d"}, + {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe32b45dd6d118f5ea2e5deaed417d8a14976325c93812dd831908522b402c9"}, + {file = "grpcio-1.67.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe89295219b9c9e47780a0f1c75ca44211e706d1c598242249fe717af3385ec8"}, + {file = "grpcio-1.67.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa8d025fae1595a207b4e47c2e087cb88d47008494db258ac561c00877d4c8f8"}, + {file = "grpcio-1.67.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f95e15db43e75a534420e04822df91f645664bf4ad21dfaad7d51773c80e6bb4"}, + {file = "grpcio-1.67.0-cp310-cp310-win32.whl", hash = "sha256:a6b9a5c18863fd4b6624a42e2712103fb0f57799a3b29651c0e5b8119a519d65"}, + {file = "grpcio-1.67.0-cp310-cp310-win_amd64.whl", hash = "sha256:b6eb68493a05d38b426604e1dc93bfc0137c4157f7ab4fac5771fd9a104bbaa6"}, + {file = "grpcio-1.67.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:e91d154689639932305b6ea6f45c6e46bb51ecc8ea77c10ef25aa77f75443ad4"}, + {file = "grpcio-1.67.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cb204a742997277da678611a809a8409657b1398aaeebf73b3d9563b7d154c13"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:ae6de510f670137e755eb2a74b04d1041e7210af2444103c8c95f193340d17ee"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74b900566bdf68241118f2918d312d3bf554b2ce0b12b90178091ea7d0a17b3d"}, + {file = "grpcio-1.67.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4e95e43447a02aa603abcc6b5e727d093d161a869c83b073f50b9390ecf0fa8"}, + {file = "grpcio-1.67.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0bb94e66cd8f0baf29bd3184b6aa09aeb1a660f9ec3d85da615c5003154bc2bf"}, + {file = "grpcio-1.67.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:82e5bd4b67b17c8c597273663794a6a46a45e44165b960517fe6d8a2f7f16d23"}, + {file = "grpcio-1.67.0-cp311-cp311-win32.whl", hash = "sha256:7fc1d2b9fd549264ae585026b266ac2db53735510a207381be509c315b4af4e8"}, + {file = "grpcio-1.67.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac11ecb34a86b831239cc38245403a8de25037b448464f95c3315819e7519772"}, + {file = "grpcio-1.67.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:227316b5631260e0bef8a3ce04fa7db4cc81756fea1258b007950b6efc90c05d"}, + {file = "grpcio-1.67.0-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:d90cfdafcf4b45a7a076e3e2a58e7bc3d59c698c4f6470b0bb13a4d869cf2273"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:77196216d5dd6f99af1c51e235af2dd339159f657280e65ce7e12c1a8feffd1d"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15c05a26a0f7047f720da41dc49406b395c1470eef44ff7e2c506a47ac2c0591"}, + {file = "grpcio-1.67.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3840994689cc8cbb73d60485c594424ad8adb56c71a30d8948d6453083624b52"}, + {file = "grpcio-1.67.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5a1e03c3102b6451028d5dc9f8591131d6ab3c8a0e023d94c28cb930ed4b5f81"}, + {file = "grpcio-1.67.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:682968427a63d898759474e3b3178d42546e878fdce034fd7474ef75143b64e3"}, + {file = "grpcio-1.67.0-cp312-cp312-win32.whl", hash = "sha256:d01793653248f49cf47e5695e0a79805b1d9d4eacef85b310118ba1dfcd1b955"}, + {file = "grpcio-1.67.0-cp312-cp312-win_amd64.whl", hash = "sha256:985b2686f786f3e20326c4367eebdaed3e7aa65848260ff0c6644f817042cb15"}, + {file = "grpcio-1.67.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:8c9a35b8bc50db35ab8e3e02a4f2a35cfba46c8705c3911c34ce343bd777813a"}, + {file = "grpcio-1.67.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:42199e704095b62688998c2d84c89e59a26a7d5d32eed86d43dc90e7a3bd04aa"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:c4c425f440fb81f8d0237c07b9322fc0fb6ee2b29fbef5f62a322ff8fcce240d"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:323741b6699cd2b04a71cb38f502db98f90532e8a40cb675393d248126a268af"}, + {file = "grpcio-1.67.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:662c8e105c5e5cee0317d500eb186ed7a93229586e431c1bf0c9236c2407352c"}, + {file = "grpcio-1.67.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:f6bd2ab135c64a4d1e9e44679a616c9bc944547357c830fafea5c3caa3de5153"}, + {file = "grpcio-1.67.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:2f55c1e0e2ae9bdd23b3c63459ee4c06d223b68aeb1961d83c48fb63dc29bc03"}, + {file = "grpcio-1.67.0-cp313-cp313-win32.whl", hash = "sha256:fd6bc27861e460fe28e94226e3673d46e294ca4673d46b224428d197c5935e69"}, + {file = "grpcio-1.67.0-cp313-cp313-win_amd64.whl", hash = "sha256:cf51d28063338608cd8d3cd64677e922134837902b70ce00dad7f116e3998210"}, + {file = "grpcio-1.67.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:7f200aca719c1c5dc72ab68be3479b9dafccdf03df530d137632c534bb6f1ee3"}, + {file = "grpcio-1.67.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0892dd200ece4822d72dd0952f7112c542a487fc48fe77568deaaa399c1e717d"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:f4d613fbf868b2e2444f490d18af472ccb47660ea3df52f068c9c8801e1f3e85"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c69bf11894cad9da00047f46584d5758d6ebc9b5950c0dc96fec7e0bce5cde9"}, + {file = "grpcio-1.67.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9bca3ca0c5e74dea44bf57d27e15a3a3996ce7e5780d61b7c72386356d231db"}, + {file = "grpcio-1.67.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:014dfc020e28a0d9be7e93a91f85ff9f4a87158b7df9952fe23cc42d29d31e1e"}, + {file = "grpcio-1.67.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d4ea4509d42c6797539e9ec7496c15473177ce9abc89bc5c71e7abe50fc25737"}, + {file = "grpcio-1.67.0-cp38-cp38-win32.whl", hash = 
"sha256:9d75641a2fca9ae1ae86454fd25d4c298ea8cc195dbc962852234d54a07060ad"}, + {file = "grpcio-1.67.0-cp38-cp38-win_amd64.whl", hash = "sha256:cff8e54d6a463883cda2fab94d2062aad2f5edd7f06ae3ed030f2a74756db365"}, + {file = "grpcio-1.67.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:62492bd534979e6d7127b8a6b29093161a742dee3875873e01964049d5250a74"}, + {file = "grpcio-1.67.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eef1dce9d1a46119fd09f9a992cf6ab9d9178b696382439446ca5f399d7b96fe"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:f623c57a5321461c84498a99dddf9d13dac0e40ee056d884d6ec4ebcab647a78"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54d16383044e681f8beb50f905249e4e7261dd169d4aaf6e52eab67b01cbbbe2"}, + {file = "grpcio-1.67.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2a44e572fb762c668e4812156b81835f7aba8a721b027e2d4bb29fb50ff4d33"}, + {file = "grpcio-1.67.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:391df8b0faac84d42f5b8dfc65f5152c48ed914e13c522fd05f2aca211f8bfad"}, + {file = "grpcio-1.67.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfd9306511fdfc623a1ba1dc3bc07fbd24e6cfbe3c28b4d1e05177baa2f99617"}, + {file = "grpcio-1.67.0-cp39-cp39-win32.whl", hash = "sha256:30d47dbacfd20cbd0c8be9bfa52fdb833b395d4ec32fe5cff7220afc05d08571"}, + {file = "grpcio-1.67.0-cp39-cp39-win_amd64.whl", hash = "sha256:f55f077685f61f0fbd06ea355142b71e47e4a26d2d678b3ba27248abfe67163a"}, + {file = "grpcio-1.67.0.tar.gz", hash = "sha256:e090b2553e0da1c875449c8e75073dd4415dd71c9bde6a406240fdf4c0ee467c"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.65.5)"] +protobuf = ["grpcio-tools (>=1.67.0)"] [[package]] name = "h11" @@ -1843,13 +2097,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.6" description = "A minimal low-level HTTP client." optional = true python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, ] [package.dependencies] @@ -1860,17 +2114,17 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" -version = "0.27.0" +version = "0.27.2" description = "The next generation HTTP client." 
optional = true python-versions = ">=3.8" files = [ - {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, - {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, ] [package.dependencies] @@ -1885,6 +2139,7 @@ brotli = ["brotli", "brotlicffi"] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "hyperframe" @@ -1899,15 +2154,18 @@ files = [ [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "ijson" version = "3.3.0" @@ -2024,22 +2282,26 @@ files = [ [[package]] name = "importlib-metadata" -version = "8.4.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, - {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "importlib-resources" @@ -2101,13 +2363,13 @@ files = [ [[package]] name = "ipfabric" -version = "6.9.4" +version = "6.10.0" description = "Python package for interacting with IP Fabric" optional = true python-versions = "<4.0,>=3.8" files = [ - {file = "ipfabric-6.9.4-py3-none-any.whl", hash = 
"sha256:31d65d4de544233ddc1278b651c1ccf2ac798bde2ce20528ce232e5db602d2f1"}, - {file = "ipfabric-6.9.4.tar.gz", hash = "sha256:6733e0b7447f7c4274735d1010d5879a3952cf540a3f3f458d256aff6a4a1b92"}, + {file = "ipfabric-6.10.0-py3-none-any.whl", hash = "sha256:5f419f1fbe1c9fa939c15969ae8a8e541d3c4e18f2588bc81ee3ba028af21c7a"}, + {file = "ipfabric-6.10.0.tar.gz", hash = "sha256:02b3b47fd10aada88891d7fe56b56782508778daecad34d37f75e3828e3faa07"}, ] [package.dependencies] @@ -2126,9 +2388,11 @@ pytz = ">=2023.1,<2025" typing-extensions = {version = ">=4.9.0,<5.0.0", markers = "python_version < \"3.9\""} [package.extras] -all = ["openpyxl (>=3.1.2,<4.0.0)", "pandas (>=2.0.0,<3.0.0)", "pandas (>=2.1.4,<3.0.0)", "python-json-logger (>=2.0.7,<3.0.0)", "pyyaml (>=6.0.1,<7.0.0)", "rich (>=13.7.0,<14.0.0)", "tabulate (>=0.8.9,<0.10.0)"] -cli = ["rich (>=13.7.0,<14.0.0)"] +all = ["jinja2 (>=3.1.4,<4.0.0)", "openpyxl (>=3.1.2,<4.0.0)", "pandas (>=2.0.0,<3.0.0)", "pandas (>=2.1.4,<3.0.0)", "python-json-logger (>=2.0.7,<3.0.0)", "pyyaml (>=6.0.1,<7.0.0)", "rich (>=13.7.0,<14.0.0)", "tabulate (>=0.8.9,<0.10.0)"] +cli = ["openpyxl (>=3.1.2,<4.0.0)", "rich (>=13.7.0,<14.0.0)"] +cve = ["openpyxl (>=3.1.2,<4.0.0)"] examples = ["openpyxl (>=3.1.2,<4.0.0)", "pandas (>=2.0.0,<3.0.0)", "pandas (>=2.1.4,<3.0.0)", "python-json-logger (>=2.0.7,<3.0.0)", "pyyaml (>=6.0.1,<7.0.0)", "rich (>=13.7.0,<14.0.0)", "tabulate (>=0.8.9,<0.10.0)"] +matrix = ["jinja2 (>=3.1.4,<4.0.0)", "openpyxl (>=3.1.2,<4.0.0)"] pd = ["pandas (>=2.0.0,<3.0.0)", "pandas (>=2.1.4,<3.0.0)"] [[package]] @@ -2259,132 +2523,143 @@ referencing = ">=0.31.0" [[package]] name = "kiwisolver" -version = "1.4.5" +version = "1.4.7" description = "A fast implementation of the Cassowary constraint solver" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:05703cf211d585109fcd72207a31bb170a0f22144d68298dc5e61b3c946518af"}, - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:146d14bebb7f1dc4d5fbf74f8a6cb15ac42baadee8912eb84ac0b3b2a3dc6ac3"}, - {file = "kiwisolver-1.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ef7afcd2d281494c0a9101d5c571970708ad911d028137cd558f02b851c08b4"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9eaa8b117dc8337728e834b9c6e2611f10c79e38f65157c4c38e9400286f5cb1"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec20916e7b4cbfb1f12380e46486ec4bcbaa91a9c448b97023fde0d5bbf9e4ff"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b42c68602539407884cf70d6a480a469b93b81b7701378ba5e2328660c847a"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa12042de0171fad672b6c59df69106d20d5596e4f87b5e8f76df757a7c399aa"}, - {file = "kiwisolver-1.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a40773c71d7ccdd3798f6489aaac9eee213d566850a9533f8d26332d626b82c"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:19df6e621f6d8b4b9c4d45f40a66839294ff2bb235e64d2178f7522d9170ac5b"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:83d78376d0d4fd884e2c114d0621624b73d2aba4e2788182d286309ebdeed770"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:e391b1f0a8a5a10ab3b9bb6afcfd74f2175f24f8975fb87ecae700d1503cdee0"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:852542f9481f4a62dbb5dd99e8ab7aedfeb8fb6342349a181d4036877410f525"}, - {file = "kiwisolver-1.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59edc41b24031bc25108e210c0def6f6c2191210492a972d585a06ff246bb79b"}, - {file = "kiwisolver-1.4.5-cp310-cp310-win32.whl", hash = "sha256:a6aa6315319a052b4ee378aa171959c898a6183f15c1e541821c5c59beaa0238"}, - {file = "kiwisolver-1.4.5-cp310-cp310-win_amd64.whl", hash = "sha256:d0ef46024e6a3d79c01ff13801cb19d0cad7fd859b15037aec74315540acc276"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:11863aa14a51fd6ec28688d76f1735f8f69ab1fabf388851a595d0721af042f5"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8ab3919a9997ab7ef2fbbed0cc99bb28d3c13e6d4b1ad36e97e482558a91be90"}, - {file = "kiwisolver-1.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fcc700eadbbccbf6bc1bcb9dbe0786b4b1cb91ca0dcda336eef5c2beed37b797"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfdd7c0b105af050eb3d64997809dc21da247cf44e63dc73ff0fd20b96be55a9"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c6a5964640638cdeaa0c359382e5703e9293030fe730018ca06bc2010c4437"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbea0db94288e29afcc4c28afbf3a7ccaf2d7e027489c449cf7e8f83c6346eb9"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ceec1a6bc6cab1d6ff5d06592a91a692f90ec7505d6463a88a52cc0eb58545da"}, - {file = "kiwisolver-1.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:040c1aebeda72197ef477a906782b5ab0d387642e93bda547336b8957c61022e"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f91de7223d4c7b793867797bacd1ee53bfe7359bd70d27b7b58a04efbb9436c8"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:faae4860798c31530dd184046a900e652c95513796ef51a12bc086710c2eec4d"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0157420efcb803e71d1b28e2c287518b8808b7cf1ab8af36718fd0a2c453eb0"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:06f54715b7737c2fecdbf140d1afb11a33d59508a47bf11bb38ecf21dc9ab79f"}, - {file = "kiwisolver-1.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fdb7adb641a0d13bdcd4ef48e062363d8a9ad4a182ac7647ec88f695e719ae9f"}, - {file = "kiwisolver-1.4.5-cp311-cp311-win32.whl", hash = "sha256:bb86433b1cfe686da83ce32a9d3a8dd308e85c76b60896d58f082136f10bffac"}, - {file = "kiwisolver-1.4.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c08e1312a9cf1074d17b17728d3dfce2a5125b2d791527f33ffbe805200a355"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:32d5cf40c4f7c7b3ca500f8985eb3fb3a7dfc023215e876f207956b5ea26632a"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f846c260f483d1fd217fe5ed7c173fb109efa6b1fc8381c8b7552c5781756192"}, - {file = "kiwisolver-1.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ff5cf3571589b6d13bfbfd6bcd7a3f659e42f96b5fd1c4830c4cf21d4f5ef45"}, - {file = 
"kiwisolver-1.4.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7269d9e5f1084a653d575c7ec012ff57f0c042258bf5db0954bf551c158466e7"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da802a19d6e15dffe4b0c24b38b3af68e6c1a68e6e1d8f30148c83864f3881db"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3aba7311af82e335dd1e36ffff68aaca609ca6290c2cb6d821a39aa075d8e3ff"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763773d53f07244148ccac5b084da5adb90bfaee39c197554f01b286cf869228"}, - {file = "kiwisolver-1.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270953c0d8cdab5d422bee7d2007f043473f9d2999631c86a223c9db56cbd16"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d099e745a512f7e3bbe7249ca835f4d357c586d78d79ae8f1dcd4d8adeb9bda9"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:74db36e14a7d1ce0986fa104f7d5637aea5c82ca6326ed0ec5694280942d1162"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e5bab140c309cb3a6ce373a9e71eb7e4873c70c2dda01df6820474f9889d6d4"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0f114aa76dc1b8f636d077979c0ac22e7cd8f3493abbab152f20eb8d3cda71f3"}, - {file = "kiwisolver-1.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:88a2df29d4724b9237fc0c6eaf2a1adae0cdc0b3e9f4d8e7dc54b16812d2d81a"}, - {file = "kiwisolver-1.4.5-cp312-cp312-win32.whl", hash = "sha256:72d40b33e834371fd330fb1472ca19d9b8327acb79a5821d4008391db8e29f20"}, - {file = "kiwisolver-1.4.5-cp312-cp312-win_amd64.whl", hash = "sha256:2c5674c4e74d939b9d91dda0fae10597ac7521768fec9e399c70a1f27e2ea2d9"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3a2b053a0ab7a3960c98725cfb0bf5b48ba82f64ec95fe06f1d06c99b552e130"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cd32d6c13807e5c66a7cbb79f90b553642f296ae4518a60d8d76243b0ad2898"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59ec7b7c7e1a61061850d53aaf8e93db63dce0c936db1fda2658b70e4a1be709"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da4cfb373035def307905d05041c1d06d8936452fe89d464743ae7fb8371078b"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2400873bccc260b6ae184b2b8a4fec0e4082d30648eadb7c3d9a13405d861e89"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1b04139c4236a0f3aff534479b58f6f849a8b351e1314826c2d230849ed48985"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4e66e81a5779b65ac21764c295087de82235597a2293d18d943f8e9e32746265"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7931d8f1f67c4be9ba1dd9c451fb0eeca1a25b89e4d3f89e828fe12a519b782a"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b3f7e75f3015df442238cca659f8baa5f42ce2a8582727981cbfa15fee0ee205"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:bbf1d63eef84b2e8c89011b7f2235b1e0bf7dacc11cac9431fc6468e99ac77fb"}, - {file = 
"kiwisolver-1.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4c380469bd3f970ef677bf2bcba2b6b0b4d5c75e7a020fb863ef75084efad66f"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-win32.whl", hash = "sha256:9408acf3270c4b6baad483865191e3e582b638b1654a007c62e3efe96f09a9a3"}, - {file = "kiwisolver-1.4.5-cp37-cp37m-win_amd64.whl", hash = "sha256:5b94529f9b2591b7af5f3e0e730a4e0a41ea174af35a4fd067775f9bdfeee01a"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:11c7de8f692fc99816e8ac50d1d1aef4f75126eefc33ac79aac02c099fd3db71"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:53abb58632235cd154176ced1ae8f0d29a6657aa1aa9decf50b899b755bc2b93"}, - {file = "kiwisolver-1.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:88b9f257ca61b838b6f8094a62418421f87ac2a1069f7e896c36a7d86b5d4c29"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3195782b26fc03aa9c6913d5bad5aeb864bdc372924c093b0f1cebad603dd712"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc579bf0f502e54926519451b920e875f433aceb4624a3646b3252b5caa9e0b6"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a580c91d686376f0f7c295357595c5a026e6cbc3d77b7c36e290201e7c11ecb"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cfe6ab8da05c01ba6fbea630377b5da2cd9bcbc6338510116b01c1bc939a2c18"}, - {file = "kiwisolver-1.4.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d2e5a98f0ec99beb3c10e13b387f8db39106d53993f498b295f0c914328b1333"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a51a263952b1429e429ff236d2f5a21c5125437861baeed77f5e1cc2d2c7c6da"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3edd2fa14e68c9be82c5b16689e8d63d89fe927e56debd6e1dbce7a26a17f81b"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:74d1b44c6cfc897df648cc9fdaa09bc3e7679926e6f96df05775d4fb3946571c"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:76d9289ed3f7501012e05abb8358bbb129149dbd173f1f57a1bf1c22d19ab7cc"}, - {file = "kiwisolver-1.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:92dea1ffe3714fa8eb6a314d2b3c773208d865a0e0d35e713ec54eea08a66250"}, - {file = "kiwisolver-1.4.5-cp38-cp38-win32.whl", hash = "sha256:5c90ae8c8d32e472be041e76f9d2f2dbff4d0b0be8bd4041770eddb18cf49a4e"}, - {file = "kiwisolver-1.4.5-cp38-cp38-win_amd64.whl", hash = "sha256:c7940c1dc63eb37a67721b10d703247552416f719c4188c54e04334321351ced"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9407b6a5f0d675e8a827ad8742e1d6b49d9c1a1da5d952a67d50ef5f4170b18d"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15568384086b6df3c65353820a4473575dbad192e35010f622c6ce3eebd57af9"}, - {file = "kiwisolver-1.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0dc9db8e79f0036e8173c466d21ef18e1befc02de8bf8aa8dc0813a6dc8a7046"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:cdc8a402aaee9a798b50d8b827d7ecf75edc5fb35ea0f91f213ff927c15f4ff0"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6c3bd3cde54cafb87d74d8db50b909705c62b17c2099b8f2e25b461882e544ff"}, - {file = 
"kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:955e8513d07a283056b1396e9a57ceddbd272d9252c14f154d450d227606eb54"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:346f5343b9e3f00b8db8ba359350eb124b98c99efd0b408728ac6ebf38173958"}, - {file = "kiwisolver-1.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9098e0049e88c6a24ff64545cdfc50807818ba6c1b739cae221bbbcbc58aad3"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:00bd361b903dc4bbf4eb165f24d1acbee754fce22ded24c3d56eec268658a5cf"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7b8b454bac16428b22560d0a1cf0a09875339cab69df61d7805bf48919415901"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f1d072c2eb0ad60d4c183f3fb44ac6f73fb7a8f16a2694a91f988275cbf352f9"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:31a82d498054cac9f6d0b53d02bb85811185bcb477d4b60144f915f3b3126342"}, - {file = "kiwisolver-1.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6512cb89e334e4700febbffaaa52761b65b4f5a3cf33f960213d5656cea36a77"}, - {file = "kiwisolver-1.4.5-cp39-cp39-win32.whl", hash = "sha256:9db8ea4c388fdb0f780fe91346fd438657ea602d58348753d9fb265ce1bca67f"}, - {file = "kiwisolver-1.4.5-cp39-cp39-win_amd64.whl", hash = "sha256:59415f46a37f7f2efeec758353dd2eae1b07640d8ca0f0c42548ec4125492635"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5c7b3b3a728dc6faf3fc372ef24f21d1e3cee2ac3e9596691d746e5a536de920"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:620ced262a86244e2be10a676b646f29c34537d0d9cc8eb26c08f53d98013390"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:378a214a1e3bbf5ac4a8708304318b4f890da88c9e6a07699c4ae7174c09a68d"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf7be1207676ac608a50cd08f102f6742dbfc70e8d60c4db1c6897f62f71523"}, - {file = "kiwisolver-1.4.5-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ba55dce0a9b8ff59495ddd050a0225d58bd0983d09f87cfe2b6aec4f2c1234e4"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd32ea360bcbb92d28933fc05ed09bffcb1704ba3fc7942e81db0fd4f81a7892"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5e7139af55d1688f8b960ee9ad5adafc4ac17c1c473fe07133ac092310d76544"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dced8146011d2bc2e883f9bd68618b8247387f4bbec46d7392b3c3b032640126"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9bf3325c47b11b2e51bca0824ea217c7cd84491d8ac4eefd1e409705ef092bd"}, - {file = "kiwisolver-1.4.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5794cf59533bc3f1b1c821f7206a3617999db9fbefc345360aafe2e067514929"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e368f200bbc2e4f905b8e71eb38b3c04333bddaa6a2464a6355487b02bb7fb09"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d706eba36b4c4d5bc6c6377bb6568098765e990cfc21ee16d13963fab7b3e7"}, - {file = 
"kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85267bd1aa8880a9c88a8cb71e18d3d64d2751a790e6ca6c27b8ccc724bcd5ad"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210ef2c3a1f03272649aff1ef992df2e724748918c4bc2d5a90352849eb40bea"}, - {file = "kiwisolver-1.4.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11d011a7574eb3b82bcc9c1a1d35c1d7075677fdd15de527d91b46bd35e935ee"}, - {file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a9c83f75223d5e48b0bc9cb1bf2776cf01563e00ade8775ffe13b0b6e1af3a6"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58370b1ffbd35407444d57057b57da5d6549d2d854fa30249771775c63b5fe17"}, + {file = "kiwisolver-1.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa0abdf853e09aff551db11fce173e2177d00786c688203f52c87ad7fcd91ef9"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d53103597a252fb3ab8b5845af04c7a26d5e7ea8122303dd7a021176a87e8b9"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f17c5ffa8e9462fb79f62746428dd57b46eb931698e42e990ad63103f35e6c"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a9ca9c710d598fd75ee5de59d5bda2684d9db36a9f50b6125eaea3969c2599"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4d742cb7af1c28303a51b7a27aaee540e71bb8e24f68c736f6f2ffc82f2bf05"}, + {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28c7fea2196bf4c2f8d46a0415c77a1c480cc0724722f23d7410ffe9842c407"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e968b84db54f9d42046cf154e02911e39c0435c9801681e3fc9ce8a3c4130278"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0c18ec74c0472de033e1bebb2911c3c310eef5649133dd0bedf2a169a1b269e5"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8f0ea6da6d393d8b2e187e6a5e3fb81f5862010a40c3945e2c6d12ae45cfb2ad"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f106407dda69ae456dd1227966bf445b157ccc80ba0dff3802bb63f30b74e895"}, + {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84ec80df401cfee1457063732d90022f93951944b5b58975d34ab56bb150dfb3"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win32.whl", hash = "sha256:71bb308552200fb2c195e35ef05de12f0c878c07fc91c270eb3d6e41698c3bcc"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:44756f9fd339de0fb6ee4f8c1696cfd19b2422e0d70b4cefc1cc7f1f64045a8c"}, + {file = "kiwisolver-1.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:78a42513018c41c2ffd262eb676442315cbfe3c44eed82385c2ed043bc63210a"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d2b0e12a42fb4e72d509fc994713d099cbb15ebf1103545e8a45f14da2dfca54"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a8781ac3edc42ea4b90bc23e7d37b665d89423818e26eb6df90698aa2287c95"}, + {file = "kiwisolver-1.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46707a10836894b559e04b0fd143e343945c97fd170d69a2d26d640b4e297935"}, + {file = 
"kiwisolver-1.4.7-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef97b8df011141c9b0f6caf23b29379f87dd13183c978a30a3c546d2c47314cb"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab58c12a2cd0fc769089e6d38466c46d7f76aced0a1f54c77652446733d2d02"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803b8e1459341c1bb56d1c5c010406d5edec8a0713a0945851290a7930679b51"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9a9e8a507420fe35992ee9ecb302dab68550dedc0da9e2880dd88071c5fb052"}, + {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18077b53dc3bb490e330669a99920c5e6a496889ae8c63b58fbc57c3d7f33a18"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6af936f79086a89b3680a280c47ea90b4df7047b5bdf3aa5c524bbedddb9e545"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3abc5b19d24af4b77d1598a585b8a719beb8569a71568b66f4ebe1fb0449460b"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:933d4de052939d90afbe6e9d5273ae05fb836cc86c15b686edd4b3560cc0ee36"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:65e720d2ab2b53f1f72fb5da5fb477455905ce2c88aaa671ff0a447c2c80e8e3"}, + {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3bf1ed55088f214ba6427484c59553123fdd9b218a42bbc8c6496d6754b1e523"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win32.whl", hash = "sha256:4c00336b9dd5ad96d0a558fd18a8b6f711b7449acce4c157e7343ba92dd0cf3d"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:929e294c1ac1e9f615c62a4e4313ca1823ba37326c164ec720a803287c4c499b"}, + {file = "kiwisolver-1.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:e33e8fbd440c917106b237ef1a2f1449dfbb9b6f6e1ce17c94cd6a1e0d438376"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a"}, + {file = "kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258"}, + {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107"}, + {file = 
"kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383"}, + {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb"}, + {file = "kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6"}, + {file = "kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34"}, + {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a"}, + {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76"}, + {file = "kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650"}, + {file = 
"kiwisolver-1.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5d5abf8f8ec1f4e22882273c423e16cae834c36856cac348cfbfa68e01c40f3a"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aeb3531b196ef6f11776c21674dba836aeea9d5bd1cf630f869e3d90b16cfade"}, + {file = "kiwisolver-1.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7d755065e4e866a8086c9bdada157133ff466476a2ad7861828e17b6026e22c"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08471d4d86cbaec61f86b217dd938a83d85e03785f51121e791a6e6689a3be95"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7bbfcb7165ce3d54a3dfbe731e470f65739c4c1f85bb1018ee912bae139e263b"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d34eb8494bea691a1a450141ebb5385e4b69d38bb8403b5146ad279f4b30fa3"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9242795d174daa40105c1d86aba618e8eab7bf96ba8c3ee614da8302a9f95503"}, + {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a0f64a48bb81af7450e641e3fe0b0394d7381e342805479178b3d335d60ca7cf"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8e045731a5416357638d1700927529e2b8ab304811671f665b225f8bf8d8f933"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4322872d5772cae7369f8351da1edf255a604ea7087fe295411397d0cfd9655e"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e1631290ee9271dffe3062d2634c3ecac02c83890ada077d225e081aca8aab89"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:edcfc407e4eb17e037bca59be0e85a2031a2ac87e4fed26d3e9df88b4165f92d"}, + {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4d05d81ecb47d11e7f8932bd8b61b720bf0b41199358f3f5e36d38e28f0532c5"}, + {file = "kiwisolver-1.4.7-cp38-cp38-win32.whl", hash = "sha256:b38ac83d5f04b15e515fd86f312479d950d05ce2368d5413d46c088dda7de90a"}, + {file = "kiwisolver-1.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:d83db7cde68459fc803052a55ace60bea2bae361fc3b7a6d5da07e11954e4b09"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9362ecfca44c863569d3d3c033dbe8ba452ff8eed6f6b5806382741a1334bd"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8df2eb9b2bac43ef8b082e06f750350fbbaf2887534a5be97f6cf07b19d9583"}, + {file = "kiwisolver-1.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f32d6edbc638cde7652bd690c3e728b25332acbadd7cad670cc4a02558d9c417"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e2e6c39bd7b9372b0be21456caab138e8e69cc0fc1190a9dfa92bd45a1e6e904"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dda56c24d869b1193fcc763f1284b9126550eaf84b88bbc7256e15028f19188a"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79849239c39b5e1fd906556c474d9b0439ea6792b637511f3fe3a41158d89ca8"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e3bc157fed2a4c02ec468de4ecd12a6e22818d4f09cde2c31ee3226ffbefab2"}, + {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:3da53da805b71e41053dc670f9a820d1157aae77b6b944e08024d17bcd51ef88"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8705f17dfeb43139a692298cb6637ee2e59c0194538153e83e9ee0c75c2eddde"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82a5c2f4b87c26bb1a0ef3d16b5c4753434633b83d365cc0ddf2770c93829e3c"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce8be0466f4c0d585cdb6c1e2ed07232221df101a4c6f28821d2aa754ca2d9e2"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:409afdfe1e2e90e6ee7fc896f3df9a7fec8e793e58bfa0d052c8a82f99c37abb"}, + {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b9c3f4ee0b9a439d2415012bd1b1cc2df59e4d6a9939f4d669241d30b414327"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win32.whl", hash = "sha256:a79ae34384df2b615eefca647a2873842ac3b596418032bef9a7283675962644"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:cf0438b42121a66a3a667de17e779330fc0f20b0d97d59d2f2121e182b0505e4"}, + {file = "kiwisolver-1.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:764202cc7e70f767dab49e8df52c7455e8de0df5d858fa801a11aa0d882ccf3f"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:94252291e3fe68001b1dd747b4c0b3be12582839b95ad4d1b641924d68fd4643"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b7dfa3b546da08a9f622bb6becdb14b3e24aaa30adba66749d38f3cc7ea9706"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3de6481f4ed8b734da5df134cd5a6a64fe32124fe83dde1e5b5f29fe30b1e6"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91b5f9f1205845d488c928e8570dcb62b893372f63b8b6e98b863ebd2368ff2"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fa14dbd66b8b8f470d5fc79c089a66185619d31645f9b0773b88b19f7223c4"}, + {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:eb542fe7933aa09d8d8f9d9097ef37532a7df6497819d16efe4359890a2f417a"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bfa1acfa0c54932d5607e19a2c24646fb4c1ae2694437789129cf099789a3b00"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:eee3ea935c3d227d49b4eb85660ff631556841f6e567f0f7bda972df6c2c9935"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f3160309af4396e0ed04db259c3ccbfdc3621b5559b5453075e5de555e1f3a1b"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a17f6a29cf8935e587cc8a4dbfc8368c55edc645283db0ce9801016f83526c2d"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10849fb2c1ecbfae45a693c070e0320a91b35dd4bcf58172c023b994283a124d"}, + {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ac542bf38a8a4be2dc6b15248d36315ccc65f0743f7b1a76688ffb6b5129a5c2"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b01aac285f91ca889c800042c35ad3b239e704b150cfd3382adfc9dcc780e39"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48be928f59a1f5c8207154f935334d374e79f2b5d212826307d072595ad76a2e"}, + {file = 
"kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f37cfe618a117e50d8c240555331160d73d0411422b59b5ee217843d7b693608"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599b5c873c63a1f6ed7eead644a8a380cfbdf5db91dcb6f85707aaab213b1674"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801fa7802e5cfabe3ab0c81a34c323a319b097dfb5004be950482d882f3d7225"}, + {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c6c43471bc764fad4bc99c5c2d6d16a676b1abf844ca7c8702bdae92df01ee0"}, + {file = "kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60"}, ] [[package]] name = "kombu" -version = "5.4.0" +version = "5.4.2" description = "Messaging library for Python." optional = false python-versions = ">=3.8" files = [ - {file = "kombu-5.4.0-py3-none-any.whl", hash = "sha256:c8dd99820467610b4febbc7a9e8a0d3d7da2d35116b67184418b51cc520ea6b6"}, - {file = "kombu-5.4.0.tar.gz", hash = "sha256:ad200a8dbdaaa2bbc5f26d2ee7d707d9a1fded353a0f4bd751ce8c7d9f449c60"}, + {file = "kombu-5.4.2-py3-none-any.whl", hash = "sha256:14212f5ccf022fc0a70453bb025a1dcc32782a588c49ea866884047d66e14763"}, + {file = "kombu-5.4.2.tar.gz", hash = "sha256:eef572dd2fd9fc614b37580e3caeafdd5af46c1eff31e7fba89138cdb406f2cf"}, ] [package.dependencies] amqp = ">=5.1.1,<6.0.0" "backports.zoneinfo" = {version = ">=0.2.1", extras = ["tzdata"], markers = "python_version < \"3.9\""} typing-extensions = {version = "4.12.2", markers = "python_version < \"3.10\""} +tzdata = {version = "*", markers = "python_version >= \"3.9\""} vine = "5.1.0" [package.extras] @@ -2394,7 +2669,7 @@ confluentkafka = ["confluent-kafka (>=2.2.0)"] consul = ["python-consul2 (==0.1.5)"] librabbitmq = ["librabbitmq (>=2.0.0)"] mongodb = ["pymongo (>=4.1.1)"] -msgpack = ["msgpack (==1.0.8)"] +msgpack = ["msgpack (==1.1.0)"] pyro = ["pyro4 (==4.82)"] qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2)"] @@ -2698,13 +2973,13 @@ files = [ [[package]] name = "mdit-py-plugins" -version = "0.4.1" +version = "0.4.2" description = "Collection of plugins for markdown-it-py" optional = false python-versions = ">=3.8" files = [ - {file = "mdit_py_plugins-0.4.1-py3-none-any.whl", hash = "sha256:1020dfe4e6bfc2c79fb49ae4e3f5b297f5ccd20f010187acc52af2921e27dc6a"}, - {file = "mdit_py_plugins-0.4.1.tar.gz", hash = "sha256:834b8ac23d1cd60cec703646ffd22ae97b7955a6d596eb1d304be1e251ae499c"}, + {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, + {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, ] [package.dependencies] @@ -2726,6 +3001,21 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "meraki" +version = "1.45.0" +description = "Cisco Meraki Dashboard API library" +optional = true +python-versions = ">=3.8" +files = [ + {file = "meraki-1.45.0-py3-none-any.whl", hash = "sha256:8681d107fbd6b93a5fc1f98e0c3d5b611dd2df520a83de6293bd423851e89087"}, + {file = "meraki-1.45.0.tar.gz", hash = "sha256:3c179ea2128dcc032511d023cc1f1fb153dcfa697e99c6961e85563de3fddc1a"}, +] + +[package.dependencies] +aiohttp = "*" +requests = "*" + [[package]] 
name = "mergedeep" version = "1.3.4" @@ -2770,13 +3060,13 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp [[package]] name = "mkdocs-autorefs" -version = "1.1.0" +version = "1.2.0" description = "Automatically link across pages in MkDocs." optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_autorefs-1.1.0-py3-none-any.whl", hash = "sha256:492ac42f50214e81565e968f8cb0df9aba9d981542b9e7121b8f8ae9407fe6eb"}, - {file = "mkdocs_autorefs-1.1.0.tar.gz", hash = "sha256:f2fd43b11f66284bd014f9b542a05c8ecbfaad4e0d7b30b68584788217b6c656"}, + {file = "mkdocs_autorefs-1.2.0-py3-none-any.whl", hash = "sha256:d588754ae89bd0ced0c70c06f58566a4ee43471eeeee5202427da7de9ef85a2f"}, + {file = "mkdocs_autorefs-1.2.0.tar.gz", hash = "sha256:a86b93abff653521bda71cf3fc5596342b7a23982093915cb74273f67522190f"}, ] [package.dependencies] @@ -2886,69 +3176,181 @@ mkdocstrings = ">=0.25" [[package]] name = "msgpack" -version = "1.0.8" +version = "1.1.0" description = "MessagePack serializer" optional = true python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, - {file = "msgpack-1.0.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:376081f471a2ef24828b83a641a02c575d6103a3ad7fd7dade5486cad10ea659"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e390971d082dba073c05dbd56322427d3280b7cc8b53484c9377adfbae67dc2"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e073efcba9ea99db5acef3959efa45b52bc67b61b00823d2a1a6944bf45982"}, - {file = "msgpack-1.0.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82d92c773fbc6942a7a8b520d22c11cfc8fd83bba86116bfcf962c2f5c2ecdaa"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9ee32dcb8e531adae1f1ca568822e9b3a738369b3b686d1477cbc643c4a9c128"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e3aa7e51d738e0ec0afbed661261513b38b3014754c9459508399baf14ae0c9d"}, - {file = "msgpack-1.0.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69284049d07fce531c17404fcba2bb1df472bc2dcdac642ae71a2d079d950653"}, - {file = "msgpack-1.0.8-cp310-cp310-win32.whl", hash = "sha256:13577ec9e247f8741c84d06b9ece5f654920d8365a4b636ce0e44f15e07ec693"}, - {file = "msgpack-1.0.8-cp310-cp310-win_amd64.whl", hash = "sha256:e532dbd6ddfe13946de050d7474e3f5fb6ec774fbb1a188aaf469b08cf04189a"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9517004e21664f2b5a5fd6333b0731b9cf0817403a941b393d89a2f1dc2bd836"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d16a786905034e7e34098634b184a7d81f91d4c3d246edc6bd7aefb2fd8ea6ad"}, - {file = "msgpack-1.0.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2872993e209f7ed04d963e4b4fbae72d034844ec66bc4ca403329db2074377b"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c330eace3dd100bdb54b5653b966de7f51c26ec4a7d4e87132d9b4f738220ba"}, - {file = "msgpack-1.0.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b5c044f3eff2a6534768ccfd50425939e7a8b5cf9a7261c385de1e20dcfc85"}, - {file = 
"msgpack-1.0.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1876b0b653a808fcd50123b953af170c535027bf1d053b59790eebb0aeb38950"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dfe1f0f0ed5785c187144c46a292b8c34c1295c01da12e10ccddfc16def4448a"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3528807cbbb7f315bb81959d5961855e7ba52aa60a3097151cb21956fbc7502b"}, - {file = "msgpack-1.0.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e2f879ab92ce502a1e65fce390eab619774dda6a6ff719718069ac94084098ce"}, - {file = "msgpack-1.0.8-cp311-cp311-win32.whl", hash = "sha256:26ee97a8261e6e35885c2ecd2fd4a6d38252246f94a2aec23665a4e66d066305"}, - {file = "msgpack-1.0.8-cp311-cp311-win_amd64.whl", hash = "sha256:eadb9f826c138e6cf3c49d6f8de88225a3c0ab181a9b4ba792e006e5292d150e"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:114be227f5213ef8b215c22dde19532f5da9652e56e8ce969bf0a26d7c419fee"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d661dc4785affa9d0edfdd1e59ec056a58b3dbb9f196fa43587f3ddac654ac7b"}, - {file = "msgpack-1.0.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d56fd9f1f1cdc8227d7b7918f55091349741904d9520c65f0139a9755952c9e8"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0726c282d188e204281ebd8de31724b7d749adebc086873a59efb8cf7ae27df3"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8db8e423192303ed77cff4dce3a4b88dbfaf43979d280181558af5e2c3c71afc"}, - {file = "msgpack-1.0.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99881222f4a8c2f641f25703963a5cefb076adffd959e0558dc9f803a52d6a58"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b5505774ea2a73a86ea176e8a9a4a7c8bf5d521050f0f6f8426afe798689243f"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ef254a06bcea461e65ff0373d8a0dd1ed3aa004af48839f002a0c994a6f72d04"}, - {file = "msgpack-1.0.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1dd7839443592d00e96db831eddb4111a2a81a46b028f0facd60a09ebbdd543"}, - {file = "msgpack-1.0.8-cp312-cp312-win32.whl", hash = "sha256:64d0fcd436c5683fdd7c907eeae5e2cbb5eb872fafbc03a43609d7941840995c"}, - {file = "msgpack-1.0.8-cp312-cp312-win_amd64.whl", hash = "sha256:74398a4cf19de42e1498368c36eed45d9528f5fd0155241e82c4082b7e16cffd"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ceea77719d45c839fd73abcb190b8390412a890df2f83fb8cf49b2a4b5c2f40"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1ab0bbcd4d1f7b6991ee7c753655b481c50084294218de69365f8f1970d4c151"}, - {file = "msgpack-1.0.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1cce488457370ffd1f953846f82323cb6b2ad2190987cd4d70b2713e17268d24"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3923a1778f7e5ef31865893fdca12a8d7dc03a44b33e2a5f3295416314c09f5d"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22e47578b30a3e199ab067a4d43d790249b3c0587d9a771921f86250c8435db"}, - {file = "msgpack-1.0.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd739c9251d01e0279ce729e37b39d49a08c0420d3fee7f2a4968c0576678f77"}, - 
{file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d3420522057ebab1728b21ad473aa950026d07cb09da41103f8e597dfbfaeb13"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5845fdf5e5d5b78a49b826fcdc0eb2e2aa7191980e3d2cfd2a30303a74f212e2"}, - {file = "msgpack-1.0.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a0e76621f6e1f908ae52860bdcb58e1ca85231a9b0545e64509c931dd34275a"}, - {file = "msgpack-1.0.8-cp38-cp38-win32.whl", hash = "sha256:374a8e88ddab84b9ada695d255679fb99c53513c0a51778796fcf0944d6c789c"}, - {file = "msgpack-1.0.8-cp38-cp38-win_amd64.whl", hash = "sha256:f3709997b228685fe53e8c433e2df9f0cdb5f4542bd5114ed17ac3c0129b0480"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:73ee792784d48aa338bba28063e19a27e8d989344f34aad14ea6e1b9bd83f596"}, - {file = "msgpack-1.0.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e75753aeda0ddc4c28dce4c32ba2f6ec30b1b02f6c0b14e547841ba5b24f753f"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dbf059fb4b7c240c873c1245ee112505be27497e90f7c6591261c7d3c3a8228"}, - {file = "msgpack-1.0.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4916727e31c28be8beaf11cf117d6f6f188dcc36daae4e851fee88646f5b6b18"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7938111ed1358f536daf311be244f34df7bf3cdedb3ed883787aca97778b28d8"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:493c5c5e44b06d6c9268ce21b302c9ca055c1fd3484c25ba41d34476c76ee746"}, - {file = "msgpack-1.0.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fbb160554e319f7b22ecf530a80a3ff496d38e8e07ae763b9e82fadfe96f273"}, - {file = "msgpack-1.0.8-cp39-cp39-win32.whl", hash = "sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d"}, - {file = "msgpack-1.0.8-cp39-cp39-win_amd64.whl", hash = "sha256:ed59dd52075f8fc91da6053b12e8c89e37aa043f8986efd89e61fae69dc1b011"}, - {file = "msgpack-1.0.8.tar.gz", hash = "sha256:95c02b0e27e706e48d0e5426d1710ca78e0f0628d6e89d5b5a5b91a5f12274f3"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, + {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, + {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, + {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, + {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, + {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, + {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, + {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, + {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, + {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, + {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, + {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, + {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, + {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, +] + +[[package]] +name = "multidict" +version = "6.1.0" +description = "multidict implementation" +optional = true +python-versions = ">=3.8" +files = [ + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = 
"multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = 
"multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = 
"multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + [[package]] name = "myst-parser" version = "2.0.0" @@ -2977,18 +3379,18 @@ testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4, [[package]] name = "nautobot" -version = "2.3.1" +version = "2.3.8" description = "Source of truth and network automation platform." 
optional = false python-versions = "<3.13,>=3.8" files = [ - {file = "nautobot-2.3.1-py3-none-any.whl", hash = "sha256:28c02e229dcc87d69dba0e75d36c3bd219fefa9328ac996471e9b39f3ec74bb3"}, - {file = "nautobot-2.3.1.tar.gz", hash = "sha256:96a3f0ee9cf73b404abca34bd2ed53a6d4494fcf85338734baa10dcd977f27f7"}, + {file = "nautobot-2.3.8-py3-none-any.whl", hash = "sha256:1624480705158ba29fb3c9e0dc3d80e4809115b9a8584a90a787f3e7b37b484c"}, + {file = "nautobot-2.3.8.tar.gz", hash = "sha256:25319ccec7f99478b506d899159e02f02c8dcf61cd14c68401787a6e1e301dd1"}, ] [package.dependencies] celery = ">=5.3.6,<5.4.0" -Django = ">=4.2.15,<4.3.0" +Django = ">=4.2.16,<4.3.0" django-ajax-tables = ">=1.1.1,<1.2.0" django-celery-beat = ">=2.6.0,<2.7.0" django-celery-results = ">=2.5.1,<2.6.0" @@ -3002,7 +3404,7 @@ django-jinja = ">=2.11.0,<2.12.0" django-prometheus = ">=2.3.1,<2.4.0" django-redis = ">=5.4.0,<5.5.0" django-silk = ">=5.1.0,<5.2.0" -django-structlog = {version = ">=8.1.0,<9.0.0", extras = ["all"]} +django-structlog = {version = ">=8.1.0,<9.0.0", extras = ["celery"]} django-tables2 = ">=2.7.0,<2.8.0" django-taggit = ">=5.0.0,<5.1.0" django-timezone-field = ">=7.0,<7.1" @@ -3017,6 +3419,7 @@ graphene-django = ">=2.16.0,<2.17.0" graphene-django-optimizer = ">=0.8.0,<0.9.0" Jinja2 = ">=3.1.4,<3.2.0" jsonschema = ">=4.7.0,<5.0.0" +kombu = ">=5.4.2,<5.5.0" Markdown = ">=3.6,<3.7" MarkupSafe = ">=2.1.5,<2.2.0" netaddr = ">=1.3.0,<1.4.0" @@ -3027,8 +3430,8 @@ Pillow = ">=10.3.0,<10.4.0" prometheus-client = ">=0.20.0,<0.21.0" psycopg2-binary = ">=2.9.9,<2.10.0" python-slugify = ">=8.0.3,<8.1.0" -pyuwsgi = ">=2.0.23,<2.1.0" -PyYAML = ">=6.0,<6.1" +pyuwsgi = ">=2.0.26,<2.1.0" +PyYAML = ">=6.0.2,<6.1.0" social-auth-app-django = ">=5.4.2,<5.5.0" svgwrite = ">=1.4.2,<1.5.0" @@ -3076,13 +3479,13 @@ nicer-shell = ["ipython"] [[package]] name = "netutils" -version = "1.9.1" +version = "1.10.0" description = "Common helper functions useful in network automation." optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "netutils-1.9.1-py3-none-any.whl", hash = "sha256:0d6e9026cc529f365a63377159aed07769baee0bf7a7138fa86fce37b64dd9d4"}, - {file = "netutils-1.9.1.tar.gz", hash = "sha256:8ad8b5e02eb9d6692d0aaaf9c0f36da1a81f520f426a79d0e08e56cf7dbb3476"}, + {file = "netutils-1.10.0-py3-none-any.whl", hash = "sha256:19b8cc3d2cf567a986f916c90f298d241af03a71c62ec6d38d6dc3395347670b"}, + {file = "netutils-1.10.0.tar.gz", hash = "sha256:f457fb85cb622e89aa0403fb2128c50986f7ce38d93a5873981727d088619793"}, ] [package.extras] @@ -3238,14 +3641,19 @@ files = [ [[package]] name = "paginate" -version = "0.5.6" +version = "0.5.7" description = "Divides large result sets into pages for easier browsing" optional = false python-versions = "*" files = [ - {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, + {file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"}, + {file = "paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"}, ] +[package.extras] +dev = ["pytest", "tox"] +lint = ["black"] + [[package]] name = "parameterized" version = "0.8.1" @@ -3409,19 +3817,19 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "prometheus-client" @@ -3455,114 +3863,219 @@ test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", [[package]] name = "prompt-toolkit" -version = "3.0.47" +version = "3.0.48" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, - {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, + {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, + {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, ] [package.dependencies] wcwidth = "*" +[[package]] +name = "propcache" +version = "0.2.0" +description = "Accelerated property cache" +optional = true +python-versions = ">=3.8" +files = [ + {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58"}, + {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b"}, + {file = "propcache-0.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850"}, + {file = "propcache-0.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61"}, + {file = 
"propcache-0.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b"}, + {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336"}, + {file = "propcache-0.2.0-cp310-cp310-win32.whl", hash = "sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad"}, + {file = "propcache-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de"}, + {file = "propcache-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4"}, + {file = "propcache-0.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b"}, + {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b"}, + {file = "propcache-0.2.0-cp311-cp311-win32.whl", hash = "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1"}, + {file = 
"propcache-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7"}, + {file = "propcache-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e"}, + {file = "propcache-0.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23"}, + {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348"}, + {file = "propcache-0.2.0-cp312-cp312-win32.whl", hash = "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5"}, + {file = "propcache-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763"}, + {file = "propcache-0.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf"}, + {file = "propcache-0.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83"}, + {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544"}, + {file = "propcache-0.2.0-cp313-cp313-win32.whl", hash = "sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032"}, + {file = "propcache-0.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6"}, + {file = "propcache-0.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9"}, + {file = "propcache-0.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418"}, + {file = 
"propcache-0.2.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7"}, + {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed"}, + {file = "propcache-0.2.0-cp38-cp38-win32.whl", hash = "sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d"}, + {file = "propcache-0.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638"}, + {file = "propcache-0.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12"}, + {file = "propcache-0.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d"}, + {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798"}, + {file = "propcache-0.2.0-cp39-cp39-win32.whl", hash = "sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9"}, + {file = "propcache-0.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df"}, + {file = "propcache-0.2.0-py3-none-any.whl", hash = "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036"}, + {file = "propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70"}, +] + [[package]] name = "protobuf" -version = "4.25.4" +version = "4.25.5" description = "" optional = true python-versions = ">=3.8" files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = 
"protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, + {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, + {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, + {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, + {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, + {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, + {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, + {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, + {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, + {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, ] [[package]] name = "psycopg2-binary" -version = "2.9.9" +version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = 
"psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = 
"psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = 
"psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, + {file = "psycopg2-binary-2.9.10.tar.gz", hash = 
"sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, + {file = 
"psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, + {file = 
"psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, + {file = 
"psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, ] [[package]] @@ -3603,13 +4116,13 @@ files = [ [[package]] name = "pycodestyle" -version = "2.9.1" +version = "2.12.1" description = "Python style guide checker" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] @@ -3638,119 +4151,120 @@ files = [ [[package]] name = "pydantic" -version = "2.8.2" +version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, - {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, ] [package.dependencies] -annotated-types = ">=0.4.0" -pydantic-core = "2.20.1" +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" typing-extensions = {version = ">=4.6.1", markers = "python_version < \"3.13\""} [package.extras] email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.20.1" +version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, - {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, - {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, - {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, - {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, - {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, - {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, - {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, - {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, - {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, - {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, - {file = 
"pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, - {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, - {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, - {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, - {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, - {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, - {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, - {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, - {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, - {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, - {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, - {file = 
"pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, - {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, - {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, - {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, - {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, - {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, - {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, - {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, - {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, - {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, - {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, - {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, - {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, - {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, ] [package.dependencies] @@ -3780,13 +4294,13 @@ semver = ["semver (>=3.0.2)"] [[package]] name = "pydantic-settings" -version = "2.4.0" +version = "2.6.0" description = "Settings management using Pydantic" optional = true python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.4.0-py3-none-any.whl", hash = "sha256:bb6849dc067f1687574c12a639e231f3a6feeed0a12d710c1382045c5db1c315"}, - {file = "pydantic_settings-2.4.0.tar.gz", hash = "sha256:ed81c3a0f46392b4d7c0a565c05884e6e54b3456e6f0fe4d8814981172dc9a88"}, + {file = "pydantic_settings-2.6.0-py3-none-any.whl", hash = "sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0"}, + {file = "pydantic_settings-2.6.0.tar.gz", hash = "sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188"}, ] [package.dependencies] @@ -3909,13 +4423,13 @@ pylint = ">=1.7" 
[[package]] name = "pymdown-extensions" -version = "10.9" +version = "10.11.2" description = "Extension pack for Python Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.9-py3-none-any.whl", hash = "sha256:d323f7e90d83c86113ee78f3fe62fc9dee5f56b54d912660703ea1816fed5626"}, - {file = "pymdown_extensions-10.9.tar.gz", hash = "sha256:6ff740bcd99ec4172a938970d42b96128bdc9d4b9bcad72494f29921dc69b753"}, + {file = "pymdown_extensions-10.11.2-py3-none-any.whl", hash = "sha256:41cdde0a77290e480cf53892f5c5e50921a7ee3e5cd60ba91bf19837b33badcf"}, + {file = "pymdown_extensions-10.11.2.tar.gz", hash = "sha256:bc8847ecc9e784a098efd35e20cba772bc5a1b529dfcef9dc1972db9021a1049"}, ] [package.dependencies] @@ -3927,13 +4441,13 @@ extra = ["pygments (>=2.12)"] [[package]] name = "pyparsing" -version = "3.1.2" +version = "3.1.4" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = true python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, - {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, + {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, + {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, ] [package.extras] @@ -4059,13 +4573,13 @@ postgresql = ["psycopg2"] [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] @@ -4202,21 +4716,21 @@ pyyaml = "*" [[package]] name = "redis" -version = "5.0.8" +version = "5.1.1" description = "Python client for Redis database and key-value store" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "redis-5.0.8-py3-none-any.whl", hash = "sha256:56134ee08ea909106090934adc36f65c9bcbbaecea5b21ba704ba6fb561f8eb4"}, - {file = "redis-5.0.8.tar.gz", hash = "sha256:0c5b10d387568dfe0698c6fad6615750c24170e548ca2deac10c649d463e9870"}, + {file = "redis-5.1.1-py3-none-any.whl", hash = "sha256:f8ea06b7482a668c6475ae202ed8d9bcaa409f6e87fb77ed1043d912afd62e24"}, + {file = "redis-5.1.1.tar.gz", hash = "sha256:f6c997521fedbae53387307c5d0bf784d9acc28d9f1d058abeac566ec4dbed72"}, ] [package.dependencies] async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} [package.extras] -hiredis = ["hiredis (>1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] +hiredis = ["hiredis (>=3.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] [[package]] name = "referencing" @@ -4235,90 +4749,105 @@ rpds-py = ">=0.7.0" [[package]] name = "regex" -version = "2024.7.24" +version = "2024.9.11" description = "Alternative regular expression 
module, to replace re." optional = false python-versions = ">=3.8" files = [ - {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b0d3f567fafa0633aee87f08b9276c7062da9616931382993c03808bb68ce"}, - {file = "regex-2024.7.24-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3426de3b91d1bc73249042742f45c2148803c111d1175b283270177fdf669024"}, - {file = "regex-2024.7.24-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f273674b445bcb6e4409bf8d1be67bc4b58e8b46fd0d560055d515b8830063cd"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23acc72f0f4e1a9e6e9843d6328177ae3074b4182167e34119ec7233dfeccf53"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65fd3d2e228cae024c411c5ccdffae4c315271eee4a8b839291f84f796b34eca"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c414cbda77dbf13c3bc88b073a1a9f375c7b0cb5e115e15d4b73ec3a2fbc6f59"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf7a89eef64b5455835f5ed30254ec19bf41f7541cd94f266ab7cbd463f00c41"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19c65b00d42804e3fbea9708f0937d157e53429a39b7c61253ff15670ff62cb5"}, - {file = "regex-2024.7.24-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7a5486ca56c8869070a966321d5ab416ff0f83f30e0e2da1ab48815c8d165d46"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6f51f9556785e5a203713f5efd9c085b4a45aecd2a42573e2b5041881b588d1f"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a4997716674d36a82eab3e86f8fa77080a5d8d96a389a61ea1d0e3a94a582cf7"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c0abb5e4e8ce71a61d9446040c1e86d4e6d23f9097275c5bd49ed978755ff0fe"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:18300a1d78cf1290fa583cd8b7cde26ecb73e9f5916690cf9d42de569c89b1ce"}, - {file = "regex-2024.7.24-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:416c0e4f56308f34cdb18c3f59849479dde5b19febdcd6e6fa4d04b6c31c9faa"}, - {file = "regex-2024.7.24-cp310-cp310-win32.whl", hash = "sha256:fb168b5924bef397b5ba13aabd8cf5df7d3d93f10218d7b925e360d436863f66"}, - {file = "regex-2024.7.24-cp310-cp310-win_amd64.whl", hash = "sha256:6b9fc7e9cc983e75e2518496ba1afc524227c163e43d706688a6bb9eca41617e"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:382281306e3adaaa7b8b9ebbb3ffb43358a7bbf585fa93821300a418bb975281"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4fdd1384619f406ad9037fe6b6eaa3de2749e2e12084abc80169e8e075377d3b"}, - {file = "regex-2024.7.24-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d974d24edb231446f708c455fd08f94c41c1ff4f04bcf06e5f36df5ef50b95a"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2ec4419a3fe6cf8a4795752596dfe0adb4aea40d3683a132bae9c30b81e8d73"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb563dd3aea54c797adf513eeec819c4213d7dbfc311874eb4fd28d10f2ff0f2"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:45104baae8b9f67569f0f1dca5e1f1ed77a54ae1cd8b0b07aba89272710db61e"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:994448ee01864501912abf2bad9203bffc34158e80fe8bfb5b031f4f8e16da51"}, - {file = "regex-2024.7.24-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fac296f99283ac232d8125be932c5cd7644084a30748fda013028c815ba3364"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e37e809b9303ec3a179085415cb5f418ecf65ec98cdfe34f6a078b46ef823ee"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:01b689e887f612610c869421241e075c02f2e3d1ae93a037cb14f88ab6a8934c"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f6442f0f0ff81775eaa5b05af8a0ffa1dda36e9cf6ec1e0d3d245e8564b684ce"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:871e3ab2838fbcb4e0865a6e01233975df3a15e6fce93b6f99d75cacbd9862d1"}, - {file = "regex-2024.7.24-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c918b7a1e26b4ab40409820ddccc5d49871a82329640f5005f73572d5eaa9b5e"}, - {file = "regex-2024.7.24-cp311-cp311-win32.whl", hash = "sha256:2dfbb8baf8ba2c2b9aa2807f44ed272f0913eeeba002478c4577b8d29cde215c"}, - {file = "regex-2024.7.24-cp311-cp311-win_amd64.whl", hash = "sha256:538d30cd96ed7d1416d3956f94d54e426a8daf7c14527f6e0d6d425fcb4cca52"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4ebef608553aff8deb845c7f4f1d0740ff76fa672c011cc0bacb2a00fbde86"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:74007a5b25b7a678459f06559504f1eec2f0f17bca218c9d56f6a0a12bfffdad"}, - {file = "regex-2024.7.24-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7df9ea48641da022c2a3c9c641650cd09f0cd15e8908bf931ad538f5ca7919c9"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1141a1dcc32904c47f6846b040275c6e5de0bf73f17d7a409035d55b76f289"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80c811cfcb5c331237d9bad3bea2c391114588cf4131707e84d9493064d267f9"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7214477bf9bd195894cf24005b1e7b496f46833337b5dedb7b2a6e33f66d962c"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d55588cba7553f0b6ec33130bc3e114b355570b45785cebdc9daed8c637dd440"}, - {file = "regex-2024.7.24-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:558a57cfc32adcf19d3f791f62b5ff564922942e389e3cfdb538a23d65a6b610"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a512eed9dfd4117110b1881ba9a59b31433caed0c4101b361f768e7bcbaf93c5"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:86b17ba823ea76256b1885652e3a141a99a5c4422f4a869189db328321b73799"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5eefee9bfe23f6df09ffb6dfb23809f4d74a78acef004aa904dc7c88b9944b05"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:731fcd76bbdbf225e2eb85b7c38da9633ad3073822f5ab32379381e8c3c12e94"}, - {file = "regex-2024.7.24-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eaef80eac3b4cfbdd6de53c6e108b4c534c21ae055d1dbea2de6b3b8ff3def38"}, - {file = 
"regex-2024.7.24-cp312-cp312-win32.whl", hash = "sha256:185e029368d6f89f36e526764cf12bf8d6f0e3a2a7737da625a76f594bdfcbfc"}, - {file = "regex-2024.7.24-cp312-cp312-win_amd64.whl", hash = "sha256:2f1baff13cc2521bea83ab2528e7a80cbe0ebb2c6f0bfad15be7da3aed443908"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:66b4c0731a5c81921e938dcf1a88e978264e26e6ac4ec96a4d21ae0354581ae0"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:88ecc3afd7e776967fa16c80f974cb79399ee8dc6c96423321d6f7d4b881c92b"}, - {file = "regex-2024.7.24-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64bd50cf16bcc54b274e20235bf8edbb64184a30e1e53873ff8d444e7ac656b2"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb462f0e346fcf41a901a126b50f8781e9a474d3927930f3490f38a6e73b6950"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a82465ebbc9b1c5c50738536fdfa7cab639a261a99b469c9d4c7dcbb2b3f1e57"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68a8f8c046c6466ac61a36b65bb2395c74451df2ffb8458492ef49900efed293"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac8e84fff5d27420f3c1e879ce9929108e873667ec87e0c8eeb413a5311adfe"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba2537ef2163db9e6ccdbeb6f6424282ae4dea43177402152c67ef869cf3978b"}, - {file = "regex-2024.7.24-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:43affe33137fcd679bdae93fb25924979517e011f9dea99163f80b82eadc7e53"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:c9bb87fdf2ab2370f21e4d5636e5317775e5d51ff32ebff2cf389f71b9b13750"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:945352286a541406f99b2655c973852da7911b3f4264e010218bbc1cc73168f2"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8bc593dcce679206b60a538c302d03c29b18e3d862609317cb560e18b66d10cf"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3f3b6ca8eae6d6c75a6cff525c8530c60e909a71a15e1b731723233331de4169"}, - {file = "regex-2024.7.24-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c51edc3541e11fbe83f0c4d9412ef6c79f664a3745fab261457e84465ec9d5a8"}, - {file = "regex-2024.7.24-cp38-cp38-win32.whl", hash = "sha256:d0a07763776188b4db4c9c7fb1b8c494049f84659bb387b71c73bbc07f189e96"}, - {file = "regex-2024.7.24-cp38-cp38-win_amd64.whl", hash = "sha256:8fd5afd101dcf86a270d254364e0e8dddedebe6bd1ab9d5f732f274fa00499a5"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0ffe3f9d430cd37d8fa5632ff6fb36d5b24818c5c986893063b4e5bdb84cdf24"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25419b70ba00a16abc90ee5fce061228206173231f004437730b67ac77323f0d"}, - {file = "regex-2024.7.24-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33e2614a7ce627f0cdf2ad104797d1f68342d967de3695678c0cb84f530709f8"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d33a0021893ede5969876052796165bab6006559ab845fd7b515a30abdd990dc"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:04ce29e2c5fedf296b1a1b0acc1724ba93a36fb14031f3abfb7abda2806c1535"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b16582783f44fbca6fcf46f61347340c787d7530d88b4d590a397a47583f31dd"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:836d3cc225b3e8a943d0b02633fb2f28a66e281290302a79df0e1eaa984ff7c1"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:438d9f0f4bc64e8dea78274caa5af971ceff0f8771e1a2333620969936ba10be"}, - {file = "regex-2024.7.24-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:973335b1624859cb0e52f96062a28aa18f3a5fc77a96e4a3d6d76e29811a0e6e"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c5e69fd3eb0b409432b537fe3c6f44ac089c458ab6b78dcec14478422879ec5f"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fbf8c2f00904eaf63ff37718eb13acf8e178cb940520e47b2f05027f5bb34ce3"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2757ace61bc4061b69af19e4689fa4416e1a04840f33b441034202b5cd02d4"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:44fc61b99035fd9b3b9453f1713234e5a7c92a04f3577252b45feefe1b327759"}, - {file = "regex-2024.7.24-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:84c312cdf839e8b579f504afcd7b65f35d60b6285d892b19adea16355e8343c9"}, - {file = "regex-2024.7.24-cp39-cp39-win32.whl", hash = "sha256:ca5b2028c2f7af4e13fb9fc29b28d0ce767c38c7facdf64f6c2cd040413055f1"}, - {file = "regex-2024.7.24-cp39-cp39-win_amd64.whl", hash = "sha256:7c479f5ae937ec9985ecaf42e2e10631551d909f203e31308c12d703922742f9"}, - {file = "regex-2024.7.24.tar.gz", hash = "sha256:9cfd009eed1a46b27c14039ad5bbc5e71b6367c5b2e6d5f5da0ea91600817506"}, + {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"}, + {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"}, + {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"}, + {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"}, + {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash 
= "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"}, + {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"}, + {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"}, + {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"}, + {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"}, + {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"}, + {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"}, + {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"}, + {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"}, + {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"}, + {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"}, + {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"}, + {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"}, + {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"}, + {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"}, + {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"}, + {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"}, + {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"}, + {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"}, + {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"}, + {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"}, + {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"}, + {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"}, + {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"}, + {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"}, + {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"}, + {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"}, + {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"}, + {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"}, + {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"}, + {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"}, + {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"}, + {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"}, + {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"}, + {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"}, + {file = "regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"}, + {file = "regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"}, + {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"}, + {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"}, + {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"}, + {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"}, + {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"}, + {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"}, + {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"}, + {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"}, + {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"}, + {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"}, + {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"}, + {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"}, + {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"}, + {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"}, + {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"}, + {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"}, + {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"}, + {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"}, + {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"}, + {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"}, + {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"}, + {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"}, + {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"}, + {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"}, + {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"}, + {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"}, + {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"}, + {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"}, + {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"}, + {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"}, + {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"}, + {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"}, + {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"}, + {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"}, + {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"}, + {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"}, + {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"}, + {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"}, ] [[package]] @@ -4577,19 +5106,23 @@ files = [ [[package]] name = "setuptools" -version = "73.0.1" +version = "75.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-73.0.1-py3-none-any.whl", hash = "sha256:b208925fcb9f7af924ed2dc04708ea89791e24bde0d3020b27df0e116088b34e"}, - {file = "setuptools-73.0.1.tar.gz", hash = "sha256:d59a3e788ab7e012ab2c4baed1b376da6366883ee20d7a5fc426816e3d7b1193"}, + {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, + {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, ] [package.extras] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel 
(>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "singledispatch" @@ -4915,13 +5448,13 @@ files = [ [[package]] name = "tomli" -version = "2.0.1" +version = "2.0.2" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] [[package]] @@ -4985,13 +5518,13 @@ files = [ [[package]] name = "tzdata" -version = "2024.1" +version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, + {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] [[package]] @@ -5007,13 +5540,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection 
pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -5202,29 +5735,146 @@ pyyaml = "*" [package.extras] dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] +[[package]] +name = "yarl" +version = "1.15.2" +description = "Yet another URL library" +optional = true +python-versions = ">=3.8" +files = [ + {file = "yarl-1.15.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e4ee8b8639070ff246ad3649294336b06db37a94bdea0d09ea491603e0be73b8"}, + {file = "yarl-1.15.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a7cf963a357c5f00cb55b1955df8bbe68d2f2f65de065160a1c26b85a1e44172"}, + {file = "yarl-1.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:43ebdcc120e2ca679dba01a779333a8ea76b50547b55e812b8b92818d604662c"}, + {file = "yarl-1.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3433da95b51a75692dcf6cc8117a31410447c75a9a8187888f02ad45c0a86c50"}, + {file = "yarl-1.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38d0124fa992dbacd0c48b1b755d3ee0a9f924f427f95b0ef376556a24debf01"}, + {file = "yarl-1.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ded1b1803151dd0f20a8945508786d57c2f97a50289b16f2629f85433e546d47"}, + {file = "yarl-1.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace4cad790f3bf872c082366c9edd7f8f8f77afe3992b134cfc810332206884f"}, + {file = "yarl-1.15.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c77494a2f2282d9bbbbcab7c227a4d1b4bb829875c96251f66fb5f3bae4fb053"}, + {file = "yarl-1.15.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b7f227ca6db5a9fda0a2b935a2ea34a7267589ffc63c8045f0e4edb8d8dcf956"}, + {file = "yarl-1.15.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:31561a5b4d8dbef1559b3600b045607cf804bae040f64b5f5bca77da38084a8a"}, + {file = "yarl-1.15.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3e52474256a7db9dcf3c5f4ca0b300fdea6c21cca0148c8891d03a025649d935"}, + {file = "yarl-1.15.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0e1af74a9529a1137c67c887ed9cde62cff53aa4d84a3adbec329f9ec47a3936"}, + {file = "yarl-1.15.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:15c87339490100c63472a76d87fe7097a0835c705eb5ae79fd96e343473629ed"}, + {file = "yarl-1.15.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:74abb8709ea54cc483c4fb57fb17bb66f8e0f04438cff6ded322074dbd17c7ec"}, + {file = "yarl-1.15.2-cp310-cp310-win32.whl", hash = "sha256:ffd591e22b22f9cb48e472529db6a47203c41c2c5911ff0a52e85723196c0d75"}, + {file = "yarl-1.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:1695497bb2a02a6de60064c9f077a4ae9c25c73624e0d43e3aa9d16d983073c2"}, + {file = "yarl-1.15.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9fcda20b2de7042cc35cf911702fa3d8311bd40055a14446c1e62403684afdc5"}, + {file = "yarl-1.15.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:0545de8c688fbbf3088f9e8b801157923be4bf8e7b03e97c2ecd4dfa39e48e0e"}, + {file = "yarl-1.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbda058a9a68bec347962595f50546a8a4a34fd7b0654a7b9697917dc2bf810d"}, + {file = "yarl-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ac2bc069f4a458634c26b101c2341b18da85cb96afe0015990507efec2e417"}, + {file = "yarl-1.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd126498171f752dd85737ab1544329a4520c53eed3997f9b08aefbafb1cc53b"}, + {file = "yarl-1.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3db817b4e95eb05c362e3b45dafe7144b18603e1211f4a5b36eb9522ecc62bcf"}, + {file = "yarl-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:076b1ed2ac819933895b1a000904f62d615fe4533a5cf3e052ff9a1da560575c"}, + {file = "yarl-1.15.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f8cfd847e6b9ecf9f2f2531c8427035f291ec286c0a4944b0a9fce58c6446046"}, + {file = "yarl-1.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:32b66be100ac5739065496c74c4b7f3015cef792c3174982809274d7e51b3e04"}, + {file = "yarl-1.15.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:34a2d76a1984cac04ff8b1bfc939ec9dc0914821264d4a9c8fd0ed6aa8d4cfd2"}, + {file = "yarl-1.15.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0afad2cd484908f472c8fe2e8ef499facee54a0a6978be0e0cff67b1254fd747"}, + {file = "yarl-1.15.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c68e820879ff39992c7f148113b46efcd6ec765a4865581f2902b3c43a5f4bbb"}, + {file = "yarl-1.15.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:98f68df80ec6ca3015186b2677c208c096d646ef37bbf8b49764ab4a38183931"}, + {file = "yarl-1.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c56ec1eacd0a5d35b8a29f468659c47f4fe61b2cab948ca756c39b7617f0aa5"}, + {file = "yarl-1.15.2-cp311-cp311-win32.whl", hash = "sha256:eedc3f247ee7b3808ea07205f3e7d7879bc19ad3e6222195cd5fbf9988853e4d"}, + {file = "yarl-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:0ccaa1bc98751fbfcf53dc8dfdb90d96e98838010fc254180dd6707a6e8bb179"}, + {file = "yarl-1.15.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:82d5161e8cb8f36ec778fd7ac4d740415d84030f5b9ef8fe4da54784a1f46c94"}, + {file = "yarl-1.15.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fa2bea05ff0a8fb4d8124498e00e02398f06d23cdadd0fe027d84a3f7afde31e"}, + {file = "yarl-1.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99e12d2bf587b44deb74e0d6170fec37adb489964dbca656ec41a7cd8f2ff178"}, + {file = "yarl-1.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:243fbbbf003754fe41b5bdf10ce1e7f80bcc70732b5b54222c124d6b4c2ab31c"}, + {file = "yarl-1.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:856b7f1a7b98a8c31823285786bd566cf06226ac4f38b3ef462f593c608a9bd6"}, + {file = "yarl-1.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:553dad9af802a9ad1a6525e7528152a015b85fb8dbf764ebfc755c695f488367"}, + {file = "yarl-1.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30c3ff305f6e06650a761c4393666f77384f1cc6c5c0251965d6bfa5fbc88f7f"}, + {file = "yarl-1.15.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:353665775be69bbfc6d54c8d134bfc533e332149faeddd631b0bc79df0897f46"}, + {file = 
"yarl-1.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f4fe99ce44128c71233d0d72152db31ca119711dfc5f2c82385ad611d8d7f897"}, + {file = "yarl-1.15.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9c1e3ff4b89cdd2e1a24c214f141e848b9e0451f08d7d4963cb4108d4d798f1f"}, + {file = "yarl-1.15.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:711bdfae4e699a6d4f371137cbe9e740dc958530cb920eb6f43ff9551e17cfbc"}, + {file = "yarl-1.15.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4388c72174868884f76affcdd3656544c426407e0043c89b684d22fb265e04a5"}, + {file = "yarl-1.15.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f0e1844ad47c7bd5d6fa784f1d4accc5f4168b48999303a868fe0f8597bde715"}, + {file = "yarl-1.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a5cafb02cf097a82d74403f7e0b6b9df3ffbfe8edf9415ea816314711764a27b"}, + {file = "yarl-1.15.2-cp312-cp312-win32.whl", hash = "sha256:156ececdf636143f508770bf8a3a0498de64da5abd890c7dbb42ca9e3b6c05b8"}, + {file = "yarl-1.15.2-cp312-cp312-win_amd64.whl", hash = "sha256:435aca062444a7f0c884861d2e3ea79883bd1cd19d0a381928b69ae1b85bc51d"}, + {file = "yarl-1.15.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:416f2e3beaeae81e2f7a45dc711258be5bdc79c940a9a270b266c0bec038fb84"}, + {file = "yarl-1.15.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:173563f3696124372831007e3d4b9821746964a95968628f7075d9231ac6bb33"}, + {file = "yarl-1.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ce2e0f6123a60bd1a7f5ae3b2c49b240c12c132847f17aa990b841a417598a2"}, + {file = "yarl-1.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaea112aed589131f73d50d570a6864728bd7c0c66ef6c9154ed7b59f24da611"}, + {file = "yarl-1.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4ca3b9f370f218cc2a0309542cab8d0acdfd66667e7c37d04d617012485f904"}, + {file = "yarl-1.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23ec1d3c31882b2a8a69c801ef58ebf7bae2553211ebbddf04235be275a38548"}, + {file = "yarl-1.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75119badf45f7183e10e348edff5a76a94dc19ba9287d94001ff05e81475967b"}, + {file = "yarl-1.15.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e6fdc976ec966b99e4daa3812fac0274cc28cd2b24b0d92462e2e5ef90d368"}, + {file = "yarl-1.15.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8657d3f37f781d987037f9cc20bbc8b40425fa14380c87da0cb8dfce7c92d0fb"}, + {file = "yarl-1.15.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:93bed8a8084544c6efe8856c362af08a23e959340c87a95687fdbe9c9f280c8b"}, + {file = "yarl-1.15.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:69d5856d526802cbda768d3e6246cd0d77450fa2a4bc2ea0ea14f0d972c2894b"}, + {file = "yarl-1.15.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ccad2800dfdff34392448c4bf834be124f10a5bc102f254521d931c1c53c455a"}, + {file = "yarl-1.15.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:a880372e2e5dbb9258a4e8ff43f13888039abb9dd6d515f28611c54361bc5644"}, + {file = "yarl-1.15.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c998d0558805860503bc3a595994895ca0f7835e00668dadc673bbf7f5fbfcbe"}, + {file = "yarl-1.15.2-cp313-cp313-win32.whl", hash = "sha256:533a28754e7f7439f217550a497bb026c54072dbe16402b183fdbca2431935a9"}, + {file = "yarl-1.15.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:5838f2b79dc8f96fdc44077c9e4e2e33d7089b10788464609df788eb97d03aad"}, + {file = "yarl-1.15.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fbbb63bed5fcd70cd3dd23a087cd78e4675fb5a2963b8af53f945cbbca79ae16"}, + {file = "yarl-1.15.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2e93b88ecc8f74074012e18d679fb2e9c746f2a56f79cd5e2b1afcf2a8a786b"}, + {file = "yarl-1.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af8ff8d7dc07ce873f643de6dfbcd45dc3db2c87462e5c387267197f59e6d776"}, + {file = "yarl-1.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66f629632220a4e7858b58e4857927dd01a850a4cef2fb4044c8662787165cf7"}, + {file = "yarl-1.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:833547179c31f9bec39b49601d282d6f0ea1633620701288934c5f66d88c3e50"}, + {file = "yarl-1.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2aa738e0282be54eede1e3f36b81f1e46aee7ec7602aa563e81e0e8d7b67963f"}, + {file = "yarl-1.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a13a07532e8e1c4a5a3afff0ca4553da23409fad65def1b71186fb867eeae8d"}, + {file = "yarl-1.15.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c45817e3e6972109d1a2c65091504a537e257bc3c885b4e78a95baa96df6a3f8"}, + {file = "yarl-1.15.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:670eb11325ed3a6209339974b276811867defe52f4188fe18dc49855774fa9cf"}, + {file = "yarl-1.15.2-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:d417a4f6943112fae3924bae2af7112562285848d9bcee737fc4ff7cbd450e6c"}, + {file = "yarl-1.15.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bc8936d06cd53fddd4892677d65e98af514c8d78c79864f418bbf78a4a2edde4"}, + {file = "yarl-1.15.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:954dde77c404084c2544e572f342aef384240b3e434e06cecc71597e95fd1ce7"}, + {file = "yarl-1.15.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5bc0df728e4def5e15a754521e8882ba5a5121bd6b5a3a0ff7efda5d6558ab3d"}, + {file = "yarl-1.15.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b71862a652f50babab4a43a487f157d26b464b1dedbcc0afda02fd64f3809d04"}, + {file = "yarl-1.15.2-cp38-cp38-win32.whl", hash = "sha256:63eab904f8630aed5a68f2d0aeab565dcfc595dc1bf0b91b71d9ddd43dea3aea"}, + {file = "yarl-1.15.2-cp38-cp38-win_amd64.whl", hash = "sha256:2cf441c4b6e538ba0d2591574f95d3fdd33f1efafa864faa077d9636ecc0c4e9"}, + {file = "yarl-1.15.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a32d58f4b521bb98b2c0aa9da407f8bd57ca81f34362bcb090e4a79e9924fefc"}, + {file = "yarl-1.15.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:766dcc00b943c089349d4060b935c76281f6be225e39994c2ccec3a2a36ad627"}, + {file = "yarl-1.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bed1b5dbf90bad3bfc19439258c97873eab453c71d8b6869c136346acfe497e7"}, + {file = "yarl-1.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed20a4bdc635f36cb19e630bfc644181dd075839b6fc84cac51c0f381ac472e2"}, + {file = "yarl-1.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d538df442c0d9665664ab6dd5fccd0110fa3b364914f9c85b3ef9b7b2e157980"}, + {file = "yarl-1.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c6cf1d92edf936ceedc7afa61b07e9d78a27b15244aa46bbcd534c7458ee1b"}, + {file = "yarl-1.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ce44217ad99ffad8027d2fde0269ae368c86db66ea0571c62a000798d69401fb"}, + {file = "yarl-1.15.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47a6000a7e833ebfe5886b56a31cb2ff12120b1efd4578a6fcc38df16cc77bd"}, + {file = "yarl-1.15.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e52f77a0cd246086afde8815039f3e16f8d2be51786c0a39b57104c563c5cbb0"}, + {file = "yarl-1.15.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:f9ca0e6ce7774dc7830dc0cc4bb6b3eec769db667f230e7c770a628c1aa5681b"}, + {file = "yarl-1.15.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:136f9db0f53c0206db38b8cd0c985c78ded5fd596c9a86ce5c0b92afb91c3a19"}, + {file = "yarl-1.15.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:173866d9f7409c0fb514cf6e78952e65816600cb888c68b37b41147349fe0057"}, + {file = "yarl-1.15.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:6e840553c9c494a35e449a987ca2c4f8372668ee954a03a9a9685075228e5036"}, + {file = "yarl-1.15.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:458c0c65802d816a6b955cf3603186de79e8fdb46d4f19abaec4ef0a906f50a7"}, + {file = "yarl-1.15.2-cp39-cp39-win32.whl", hash = "sha256:5b48388ded01f6f2429a8c55012bdbd1c2a0c3735b3e73e221649e524c34a58d"}, + {file = "yarl-1.15.2-cp39-cp39-win_amd64.whl", hash = "sha256:81dadafb3aa124f86dc267a2168f71bbd2bfb163663661ab0038f6e4b8edb810"}, + {file = "yarl-1.15.2-py3-none-any.whl", hash = "sha256:0d3105efab7c5c091609abacad33afff33bdff0035bece164c98bcf5a85ef90a"}, + {file = "yarl-1.15.2.tar.gz", hash = "sha256:a39c36f4218a5bb668b4f06874d676d35a035ee668e6e7e3538835c703634b84"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.0" + [[package]] name = "zipp" -version = "3.20.0" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [extras] aci = ["PyYAML"] -all = ["Jinja2", "PyYAML", "cloudvision", "cvprac", "dnacentersdk", "dnspython", "ijson", "ipfabric", "nautobot-device-lifecycle-mgmt", "netutils", "oauthlib", "python-magic", "pytz", "requests", "requests-oauthlib", "six"] +all = ["Jinja2", "PyYAML", "cloudvision", "cvprac", "dnacentersdk", "dnspython", "ijson", "ipfabric", "meraki", "nautobot-device-lifecycle-mgmt", "netutils", "oauthlib", 
"python-magic", "pytz", "requests", "requests-oauthlib", "six"] aristacv = ["cloudvision", "cvprac"] device42 = ["requests"] dna-center = ["dnacentersdk", "netutils"] infoblox = ["dnspython"] ipfabric = ["httpx", "ipfabric", "netutils"] +meraki = ["meraki"] nautobot-device-lifecycle-mgmt = ["nautobot-device-lifecycle-mgmt"] pysnow = ["ijson", "oauthlib", "python-magic", "pytz", "requests", "requests-oauthlib", "six"] servicenow = ["Jinja2", "PyYAML", "ijson", "oauthlib", "python-magic", "pytz", "requests", "requests-oauthlib", "six"] @@ -5232,4 +5882,4 @@ servicenow = ["Jinja2", "PyYAML", "ijson", "oauthlib", "python-magic", "pytz", " [metadata] lock-version = "2.0" python-versions = ">=3.8,<3.13" -content-hash = "0969d1260d94aabfb9612f6faae17bb0c7f76dbe37ceda3474817c53e3257157" +content-hash = "f2319b7b5319197b7b329fa8325e1fdda18528d29a346e625b307fef97622706" diff --git a/pyproject.toml b/pyproject.toml index 964af4209..36e087a96 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "nautobot-ssot" -version = "3.1.0" +version = "3.2.0" description = "Nautobot Single Source of Truth" authors = ["Network to Code, LLC "] license = "Apache-2.0" @@ -55,10 +55,11 @@ httpx = { version = ">=0.23.3", optional = true } retry = "^0.9.2" # used for DNA Center integration dnacentersdk = { version = "^2.5.6", optional = true } +meraki = { version = "^1.37.2,<1.46.0", optional = true } [tool.poetry.group.dev.dependencies] coverage = "*" -django-debug-toolbar = "*" +django-debug-toolbar = "<4.4" invoke = "*" ipython = "*" jedi = "^0.17.2" @@ -118,6 +119,7 @@ all = [ "requests-oauthlib", "six", "dnacentersdk", + "meraki" ] aristacv = [ "cloudvision", @@ -138,6 +140,9 @@ ipfabric = [ "ipfabric", "netutils", ] +meraki = [ + "meraki", +] # pysnow = "^0.7.17" # PySNow is currently pinned to an older version of pytz as a dependency, which blocks compatibility with newer # versions of Nautobot. 
See https://github.com/rbw/pysnow/pull/186 diff --git a/tasks.py b/tasks.py index 3006f401b..0ee4210e9 100644 --- a/tasks.py +++ b/tasks.py @@ -14,11 +14,12 @@ import os import re +import sys from pathlib import Path from time import sleep from invoke.collection import Collection -from invoke.exceptions import Exit +from invoke.exceptions import Exit, UnexpectedExit from invoke.tasks import task as invoke_task @@ -72,9 +73,7 @@ def _is_compose_included(context, name): def _await_healthy_service(context, service): - container_id = docker_compose( - context, f"ps -q -- {service}", pty=False, echo=False, hide=True - ).stdout.strip() + container_id = docker_compose(context, f"ps -q -- {service}", pty=False, echo=False, hide=True).stdout.strip() _await_healthy_container(context, container_id) @@ -166,9 +165,7 @@ def docker_compose(context, command, **kwargs): ] for compose_file in context.nautobot_ssot.compose_files: - compose_file_path = os.path.join( - context.nautobot_ssot.compose_dir, compose_file - ) + compose_file_path = os.path.join(context.nautobot_ssot.compose_dir, compose_file) compose_command_tokens.append(f' -f "{compose_file_path}"') compose_command_tokens.append(command) @@ -189,23 +186,23 @@ def run_command(context, command, **kwargs): env = _read_command_env(kwargs.pop("env", None)) if is_truthy(context.nautobot_ssot.local): return context.run(command, **kwargs, env=env) + else: + # Check if nautobot is running, no need to start another nautobot container to run a command + docker_compose_status = "ps --services --filter status=running" + results = docker_compose(context, docker_compose_status, hide="out") - # Check if nautobot is running, no need to start another nautobot container to run a command - docker_compose_status = "ps --services --filter status=running" - results = docker_compose(context, docker_compose_status, hide="out") - - command_env_args = "" - for env_name in env: - command_env_args += f" --env={env_name}" + command_env_args = "" + for env_name in env: + command_env_args += f" --env={env_name}" - if "nautobot" in results.stdout: - compose_command = f"exec{command_env_args} nautobot {command}" - else: - compose_command = f"run{command_env_args} --rm --entrypoint='{command}' nautobot" + if "nautobot" in results.stdout: + compose_command = f"exec{command_env_args} nautobot {command}" + else: + compose_command = f"run{command_env_args} --rm --entrypoint='{command}' nautobot" - pty = kwargs.pop("pty", True) + pty = kwargs.pop("pty", True) - return docker_compose(context, compose_command, **kwargs, pty=pty, env=env) + return docker_compose(context, compose_command, **kwargs, pty=pty, env=env) # ------------------------------------------------------------------------------ @@ -244,20 +241,10 @@ def _get_docker_nautobot_version(context, nautobot_ver=None, python_ver=None): if python_ver is None: python_ver = context.nautobot_ssot.python_ver dockerfile_path = os.path.join(context.nautobot_ssot.compose_dir, "Dockerfile") - base_image = ( - context.run(f"grep --max-count=1 '^FROM ' {dockerfile_path}", hide=True) - .stdout.strip() - .split(" ")[1] - ) - base_image = base_image.replace(r"${NAUTOBOT_VER}", nautobot_ver).replace( - r"${PYTHON_VER}", python_ver - ) - pip_nautobot_ver = context.run( - f"docker run --rm --entrypoint '' {base_image} pip show nautobot", hide=True - ) - match_version = re.search( - r"^Version: (.+)$", pip_nautobot_ver.stdout.strip(), flags=re.MULTILINE - ) + base_image = context.run(f"grep --max-count=1 '^FROM ' {dockerfile_path}", 
hide=True).stdout.strip().split(" ")[1] + base_image = base_image.replace(r"${NAUTOBOT_VER}", nautobot_ver).replace(r"${PYTHON_VER}", python_ver) + pip_nautobot_ver = context.run(f"docker run --rm --entrypoint '' {base_image} pip show nautobot", hide=True) + match_version = re.search(r"^Version: (.+)$", pip_nautobot_ver.stdout.strip(), flags=re.MULTILINE) if match_version: return match_version.group(1) else: @@ -282,18 +269,28 @@ def _get_docker_nautobot_version(context, nautobot_ver=None, python_ver=None): ), } ) -def lock( - context, check=False, constrain_nautobot_ver=False, constrain_python_ver=False -): +def lock(context, check=False, constrain_nautobot_ver=False, constrain_python_ver=False): """Generate poetry.lock file.""" if constrain_nautobot_ver: docker_nautobot_version = _get_docker_nautobot_version(context) command = f"poetry add --lock nautobot@{docker_nautobot_version}" if constrain_python_ver: command += f" --python {context.nautobot_ssot.python_ver}" + try: + output = run_command(context, command, hide=True) + print(output.stdout, end="") + print(output.stderr, file=sys.stderr, end="") + except UnexpectedExit: + print("Unable to add Nautobot dependency with version constraint, falling back to git branch.") + command = ( + f"poetry add --lock git+https://github.com/nautobot/nautobot.git#{context.nautobot_ssot.nautobot_ver}" + ) + if constrain_python_ver: + command += f" --python {context.nautobot_ssot.python_ver}" + run_command(context, command) else: command = f"poetry {'check' if check else 'lock --no-update'}" - run_command(context, command) + run_command(context, command) # ------------------------------------------------------------------------------ @@ -324,9 +321,7 @@ def restart(context, service=""): def stop(context, service=""): """Stop specified or all services, if service is not specified, remove all containers.""" print("Stopping Nautobot...") - docker_compose( - context, "stop" if service else "down --remove-orphans", service=service - ) + docker_compose(context, "stop" if service else "down --remove-orphans", service=service) @task( @@ -345,9 +340,7 @@ def destroy(context, volumes=True, import_db_file=""): return if not volumes: - raise ValueError( - "Cannot specify `--no-volumes` and `--import-db-file` arguments at the same time." 
- ) + raise ValueError("Cannot specify `--no-volumes` and `--import-db-file` arguments at the same time.") print(f"Importing database file: {import_db_file}...") @@ -364,16 +357,12 @@ def destroy(context, volumes=True, import_db_file=""): "db", ] - container_id = docker_compose( - context, " ".join(command), pty=False, echo=False, hide=True - ).stdout.strip() + container_id = docker_compose(context, " ".join(command), pty=False, echo=False, hide=True).stdout.strip() _await_healthy_container(context, container_id) print("Stopping database container...") context.run(f"docker stop {container_id}", pty=False, echo=False, hide=True) - print( - "Database import complete, you can start Nautobot with the following command:" - ) + print("Database import complete, you can start Nautobot with the following command:") print("invoke start") @@ -549,9 +538,7 @@ def dbshell(context, db_name="", input_file="", output_file="", query=""): if input_file and query: raise ValueError("Cannot specify both, `input_file` and `query` arguments") if output_file and not (input_file or query): - raise ValueError( - "`output_file` argument requires `input_file` or `query` argument" - ) + raise ValueError("`output_file` argument requires `input_file` or `query` argument") env = {} if query: @@ -689,9 +676,7 @@ def backup_db(context, db_name="", output_file="dump.sql", readable=True): docker_compose(context, " ".join(command), pty=False) print(50 * "=") - print( - "The database backup has been successfully completed and saved to the following file:" - ) + print("The database backup has been successfully completed and saved to the following file:") print(output_file) print("You can import this database backup with the following command:") print(f"invoke import-db --input-file '{output_file}'") @@ -739,10 +724,13 @@ def help_task(context): ) def generate_release_notes(context, version=""): """Generate Release Notes using Towncrier.""" - command = "env DJANGO_SETTINGS_MODULE=nautobot.core.settings towncrier build" + command = "poetry run towncrier build" if version: command += f" --version {version}" - run_command(context, command) + else: + command += " --version `poetry version -s`" + # Due to issues with git repo ownership in the containers, this must always run locally. 
+ context.run(command) # ------------------------------------------------------------------------------ @@ -760,8 +748,27 @@ def hadolint(context): @task def pylint(context): """Run pylint code analysis.""" - command = 'pylint --init-hook "import nautobot; nautobot.setup()" --rcfile pyproject.toml nautobot_ssot' - run_command(context, command) + exit_code = 0 + + base_pylint_command = 'pylint --verbose --init-hook "import nautobot; nautobot.setup()" --rcfile pyproject.toml' + command = f"{base_pylint_command} nautobot_ssot" + if not run_command(context, command, warn=True): + exit_code = 1 + + # run the pylint_django migrations checkers on the migrations directory, if one exists + migrations_dir = Path(__file__).absolute().parent / Path("nautobot_ssot") / Path("migrations") + if migrations_dir.is_dir(): + migrations_pylint_command = ( + f"{base_pylint_command} --load-plugins=pylint_django.checkers.migrations" + " --disable=all --enable=fatal,new-db-field-with-default,missing-backwards-migration-callable" + " nautobot_ssot.migrations" + ) + if not run_command(context, migrations_pylint_command, warn=True): + exit_code = 1 + else: + print("No migrations directory found, skipping migrations checks.") + + raise Exit(code=exit_code) @task(aliases=("a",)) @@ -786,12 +793,15 @@ def ruff(context, action=None, target=None, fix=False, output_format="concise"): if not target: target = ["."] + exit_code = 0 + if "format" in action: command = "ruff format " if not fix: command += "--check " command += " ".join(target) - run_command(context, command, warn=True) + if not run_command(context, command, warn=True): + exit_code = 1 if "lint" in action: command = "ruff check " @@ -799,7 +809,10 @@ def ruff(context, action=None, target=None, fix=False, output_format="concise"): command += "--fix " command += f"--output-format {output_format} " command += " ".join(target) - run_command(context, command, warn=True) + if not run_command(context, command, warn=True): + exit_code = 1 + + raise Exit(code=exit_code) @task @@ -860,9 +873,7 @@ def unittest( # noqa: PLR0913 @task def unittest_coverage(context): """Report on code test coverage as measured by 'invoke unittest'.""" - command = ( - "coverage report --skip-covered --include 'nautobot_ssot/*' --omit *migrations*" - ) + command = "coverage report --skip-covered --include 'nautobot_ssot/*' --omit *migrations*" run_command(context, command)
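The reworked pylint and ruff tasks above share one pattern worth noting: every sub-check runs with warn=True so a failure no longer aborts the task early, and the task raises Exit with a non-zero code at the end if any sub-check failed, which lets a CI job report all findings before failing. A minimal standalone sketch of that pattern follows; the task name "checks" and its command list are illustrative, not part of this repo:

from invoke.exceptions import Exit
from invoke.tasks import task


@task
def checks(context):
    """Run every check, then fail once at the end if any of them failed."""
    exit_code = 0
    for command in ("ruff format --check .", "ruff check ."):
        # warn=True makes run() return a Result instead of raising;
        # invoke's Result is falsy when the command exited non-zero.
        if not context.run(command, warn=True):
            exit_code = 1
    # Exit(code=0) ends the task cleanly; any non-zero code fails the run.
    raise Exit(code=exit_code)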
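Similarly, the fallback added to the lock task (and mirrored in development/Dockerfile) relies on invoke raising UnexpectedExit when a command run without warn=True fails: if the pinned Nautobot version was never published to PyPI (a prerelease, for example), the poetry add fails and the task retries against the matching git ref. A rough sketch of that technique in isolation; the function name and its arguments are hypothetical:

from invoke.context import Context
from invoke.exceptions import UnexpectedExit


def pin_nautobot(context: Context, version: str, git_ref: str) -> None:
    """Pin Nautobot in poetry.lock, falling back to a git ref."""
    try:
        # Without warn=True, a non-zero exit raises UnexpectedExit.
        context.run(f"poetry add --lock nautobot@{version}")
    except UnexpectedExit:
        # The version is not on PyPI; resolve it from the git repository instead.
        context.run(f"poetry add --lock git+https://github.com/nautobot/nautobot.git#{git_ref}")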