diff --git a/sdk/databricks/azure-mgmt-databricks/MANIFEST.in b/sdk/databricks/azure-mgmt-databricks/MANIFEST.in index a3cb07df8765..3a9b6517412b 100644 --- a/sdk/databricks/azure-mgmt-databricks/MANIFEST.in +++ b/sdk/databricks/azure-mgmt-databricks/MANIFEST.in @@ -1,3 +1,4 @@ +include _meta.json recursive-include tests *.py *.yaml include *.md include azure/__init__.py diff --git a/sdk/databricks/azure-mgmt-databricks/_meta.json b/sdk/databricks/azure-mgmt-databricks/_meta.json new file mode 100644 index 000000000000..226316ddd10d --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/_meta.json @@ -0,0 +1,11 @@ +{ + "autorest": "3.4.2", + "use": [ + "@autorest/python@5.8.1", + "@autorest/modelerfour@4.19.2" + ], + "commit": "fa3ba1acdd45ddad8950133befc5b0a6f1ee5163", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/databricks/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.1 --use=@autorest/modelerfour@4.19.2 --version=3.4.2", + "readme": "specification/databricks/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py index 809e7a34ce72..00df8e78c17d 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py @@ -6,11 +6,11 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._databricks_client import DatabricksClient +from ._azure_databricks_management_client import AzureDatabricksManagementClient from ._version import VERSION __version__ = VERSION -__all__ = ['DatabricksClient'] +__all__ = ['AzureDatabricksManagementClient'] try: from ._patch import patch_sdk # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_databricks_client.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py similarity index 51% rename from sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_databricks_client.py rename to sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py index 0c18f909b850..d0ddf4a60c74 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_databricks_client.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py @@ -16,23 +16,30 @@ from typing import Any, Optional from azure.core.credentials import TokenCredential + from azure.core.pipeline.transport import HttpRequest, HttpResponse -from ._configuration import DatabricksClientConfiguration +from ._configuration import AzureDatabricksManagementClientConfiguration from .operations import WorkspacesOperations -from .operations import VNetPeeringOperations from .operations import Operations +from .operations import PrivateLinkResourcesOperations +from .operations import PrivateEndpointConnectionsOperations +from .operations import VNetPeeringOperations from . import models -class DatabricksClient(object): - """ARM Databricks. +class AzureDatabricksManagementClient(object): + """The Microsoft Azure management APIs allow end users to operate on Azure Databricks Workspace resources. 
:ivar workspaces: WorkspacesOperations operations - :vartype workspaces: azure.mgmt.databricks.operations.WorkspacesOperations - :ivar vnet_peering: VNetPeeringOperations operations - :vartype vnet_peering: azure.mgmt.databricks.operations.VNetPeeringOperations + :vartype workspaces: azure_databricks_management_client.operations.WorkspacesOperations :ivar operations: Operations operations - :vartype operations: azure.mgmt.databricks.operations.Operations + :vartype operations: azure_databricks_management_client.operations.Operations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: azure_databricks_management_client.operations.PrivateLinkResourcesOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: azure_databricks_management_client.operations.PrivateEndpointConnectionsOperations + :ivar vnet_peering: VNetPeeringOperations operations + :vartype vnet_peering: azure_databricks_management_client.operations.VNetPeeringOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The ID of the target subscription. @@ -51,7 +58,7 @@ def __init__( # type: (...) -> None if not base_url: base_url = 'https://management.azure.com' - self._config = DatabricksClientConfiguration(credential, subscription_id, **kwargs) + self._config = AzureDatabricksManagementClientConfiguration(credential, subscription_id, **kwargs) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} @@ -61,17 +68,39 @@ def __init__( self.workspaces = WorkspacesOperations( self._client, self._config, self._serialize, self._deserialize) - self.vnet_peering = VNetPeeringOperations( - self._client, self._config, self._serialize, self._deserialize) self.operations = Operations( self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.vnet_peering = VNetPeeringOperations( + self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, http_request, **kwargs): + # type: (HttpRequest, Any) -> HttpResponse + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.pipeline.transport.HttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response def close(self): # type: () -> None self._client.close() def __enter__(self): - # type: () -> DatabricksClient + # type: () -> AzureDatabricksManagementClient self._client.__enter__() return self diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py index 6ecb0775935e..37c43da379a8 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py @@ -21,8 +21,8 @@ from azure.core.credentials import TokenCredential -class DatabricksClientConfiguration(Configuration): - """Configuration for DatabricksClient. +class AzureDatabricksManagementClientConfiguration(Configuration): + """Configuration for AzureDatabricksManagementClient. Note that all parameters used to create this instance are saved as instance attributes. @@ -44,11 +44,10 @@ def __init__( raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(DatabricksClientConfiguration, self).__init__(**kwargs) + super(AzureDatabricksManagementClientConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id - self.api_version = "2018-04-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-databricks/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_metadata.json b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_metadata.json new file mode 100644 index 000000000000..4f32e60012df --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_metadata.json @@ -0,0 +1,107 @@ +{ + "chosen_version": "", + "total_api_version_list": ["2018-04-01", "2021-04-01-preview"], + "client": { + "name": "AzureDatabricksManagementClient", + "filename": "_azure_databricks_management_client", + "description": "The Microsoft Azure management APIs allow end users to operate on Azure Databricks Workspace resources.", + "base_url": "\u0027https://management.azure.com\u0027", + "custom_base_url": null, + "azure_arm": true, + "has_lro_operations": true, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"AzureDatabricksManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": 
[\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"AzureDatabricksManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}}" + }, + "global_parameters": { + "sync": { + "credential": { + "signature": "credential, # type: \"TokenCredential\"", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials.TokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id, # type: str", + "description": "The ID of the target subscription.", + "docstring_type": "str", + "required": true + } + }, + "async": { + "credential": { + "signature": "credential: \"AsyncTokenCredential\",", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id: str,", + "description": "The ID of the target subscription.", + "docstring_type": "str", + "required": true + } + }, + "constant": { + }, + "call": "credential, subscription_id", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=None, # type: Optional[str]", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: Optional[str] = None,", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + } + } + }, + "config": { + "credential": true, + "credential_scopes": ["https://management.azure.com/.default"], + "credential_default_policy_type": "BearerTokenCredentialPolicy", + "credential_default_policy_type_has_async_version": true, + "credential_key_header_name": null, + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": 
[\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" + }, + "operation_groups": { + "workspaces": "WorkspacesOperations", + "operations": "Operations", + "private_link_resources": "PrivateLinkResourcesOperations", + "private_endpoint_connections": "PrivateEndpointConnectionsOperations", + "vnet_peering": "VNetPeeringOperations" + } +} \ No newline at end of file diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py index c47f66669f1b..e5754a47ce68 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.0.0" +VERSION = "1.0.0b1" diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py index d97b828777b0..4d341ec3f5c4 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py @@ -6,5 +6,5 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._databricks_client import DatabricksClient -__all__ = ['DatabricksClient'] +from ._azure_databricks_management_client import AzureDatabricksManagementClient +__all__ = ['AzureDatabricksManagementClient'] diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_databricks_client.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py similarity index 50% rename from sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_databricks_client.py rename to sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py index 1802066b532b..8376d40c3e1e 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_databricks_client.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py @@ -8,6 +8,7 @@ from typing import Any, Optional, TYPE_CHECKING +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer @@ -15,22 +16,28 @@ # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration import DatabricksClientConfiguration +from ._configuration import AzureDatabricksManagementClientConfiguration from .operations import WorkspacesOperations -from .operations import VNetPeeringOperations from .operations import Operations +from .operations import PrivateLinkResourcesOperations +from .operations import PrivateEndpointConnectionsOperations +from .operations import VNetPeeringOperations from .. import models -class DatabricksClient(object): - """ARM Databricks. 
+class AzureDatabricksManagementClient(object): + """The Microsoft Azure management APIs allow end users to operate on Azure Databricks Workspace resources. :ivar workspaces: WorkspacesOperations operations - :vartype workspaces: azure.mgmt.databricks.aio.operations.WorkspacesOperations - :ivar vnet_peering: VNetPeeringOperations operations - :vartype vnet_peering: azure.mgmt.databricks.aio.operations.VNetPeeringOperations + :vartype workspaces: azure_databricks_management_client.aio.operations.WorkspacesOperations :ivar operations: Operations operations - :vartype operations: azure.mgmt.databricks.aio.operations.Operations + :vartype operations: azure_databricks_management_client.aio.operations.Operations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: azure_databricks_management_client.aio.operations.PrivateLinkResourcesOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: azure_databricks_management_client.aio.operations.PrivateEndpointConnectionsOperations + :ivar vnet_peering: VNetPeeringOperations operations + :vartype vnet_peering: azure_databricks_management_client.aio.operations.VNetPeeringOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. @@ -48,7 +55,7 @@ def __init__( ) -> None: if not base_url: base_url = 'https://management.azure.com' - self._config = DatabricksClientConfiguration(credential, subscription_id, **kwargs) + self._config = AzureDatabricksManagementClientConfiguration(credential, subscription_id, **kwargs) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} @@ -58,15 +65,36 @@ def __init__( self.workspaces = WorkspacesOperations( self._client, self._config, self._serialize, self._deserialize) - self.vnet_peering = VNetPeeringOperations( - self._client, self._config, self._serialize, self._deserialize) self.operations = Operations( self._client, self._config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.vnet_peering = VNetPeeringOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "DatabricksClient": + async def __aenter__(self) -> "AzureDatabricksManagementClient": await self._client.__aenter__() return self diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py index 4be80d9ec7ce..a06bbe6c18f7 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py @@ -19,8 +19,8 @@ from azure.core.credentials_async import AsyncTokenCredential -class DatabricksClientConfiguration(Configuration): - """Configuration for DatabricksClient. +class AzureDatabricksManagementClientConfiguration(Configuration): + """Configuration for AzureDatabricksManagementClient. Note that all parameters used to create this instance are saved as instance attributes. @@ -41,11 +41,10 @@ def __init__( raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(DatabricksClientConfiguration, self).__init__(**kwargs) + super(AzureDatabricksManagementClientConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id - self.api_version = "2018-04-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-databricks/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py index 7688e37c6383..6fc97620650a 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py @@ -7,11 +7,15 @@ # -------------------------------------------------------------------------- from ._workspaces_operations import WorkspacesOperations -from ._vnet_peering_operations import VNetPeeringOperations from ._operations import Operations +from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations +from ._vnet_peering_operations import VNetPeeringOperations __all__ = [ 'WorkspacesOperations', - 'VNetPeeringOperations', 'Operations', + 'PrivateLinkResourcesOperations', + 'PrivateEndpointConnectionsOperations', + 'VNetPeeringOperations', ] diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py index 2f053998c665..9a3c8f881553 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py +++ 
b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py @@ -26,7 +26,7 @@ class Operations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databricks.models + :type models: ~azure_databricks_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -43,13 +43,13 @@ def __init__(self, client, config, serializer, deserializer) -> None: def list( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: """Lists all of the available RP operations. :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.OperationListResult] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.OperationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] @@ -57,7 +57,7 @@ def list( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -93,7 +93,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..291c6ec83a02 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py @@ -0,0 +1,442 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionsOperations: + """PrivateEndpointConnectionsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_databricks_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointConnectionsList"]: + """List private endpoint connections. + + List private endpoint connections of the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionsList or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.PrivateEndpointConnectionsList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionsList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnectionsList', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> "_models.PrivateEndpointConnection": + """Get private endpoint connection. + + Get a private endpoint connection properties for a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure_databricks_management_client.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def _create_initial( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + private_endpoint_connection: "_models.PrivateEndpointConnection", + **kwargs: Any + ) -> "_models.PrivateEndpointConnection": + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(private_endpoint_connection, 'PrivateEndpointConnection') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def begin_create( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + private_endpoint_connection: "_models.PrivateEndpointConnection", + **kwargs: Any + ) -> AsyncLROPoller["_models.PrivateEndpointConnection"]: + """Update private endpoint connection status. + + Update the status of a private endpoint connection with the specified name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection. + :type private_endpoint_connection_name: str + :param private_endpoint_connection: The private endpoint connection with updated properties. + :type private_endpoint_connection: ~azure_databricks_management_client.models.PrivateEndpointConnection + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_databricks_management_client.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + private_endpoint_connection=private_endpoint_connection, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 
'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Remove private endpoint connection. + + Remove private endpoint connection with the specified name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection. + :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py new file mode 100644 index 000000000000..d8d70c948f4f --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py @@ -0,0 +1,184 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourcesOperations: + """PrivateLinkResourcesOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_databricks_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + workspace_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.PrivateLinkResourcesList"]: + """List private link resources. + + List private link resources for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateLinkResourcesList or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.PrivateLinkResourcesList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourcesList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateLinkResourcesList', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + group_id: str, + **kwargs: Any + ) -> "_models.GroupIdInformation": + """Get the specified private link resource. + + Get the specified private link resource for the given group id (sub-resource). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param group_id: The name of the private link resource. 
+ :type group_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GroupIdInformation, or the result of cls(response) + :rtype: ~azure_databricks_management_client.models.GroupIdInformation + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.GroupIdInformation"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'groupId': self._serialize.url("group_id", group_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('GroupIdInformation', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources/{groupId}'} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py index 4b3ca5d6bc52..277be5245873 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py @@ -28,7 +28,7 @@ class VNetPeeringOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databricks.models + :type models: ~azure_databricks_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -48,7 +48,7 @@ async def get( resource_group_name: str, workspace_name: str, peering_name: str, - **kwargs + **kwargs: Any ) -> Optional["_models.VirtualNetworkPeering"]: """Gets the workspace vNet Peering. 
@@ -60,7 +60,7 @@ async def get( :type peering_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: VirtualNetworkPeering, or the result of cls(response) - :rtype: ~azure.mgmt.databricks.models.VirtualNetworkPeering or None + :rtype: ~azure_databricks_management_client.models.VirtualNetworkPeering or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VirtualNetworkPeering"]] @@ -95,7 +95,7 @@ async def get( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -113,7 +113,7 @@ async def _delete_initial( resource_group_name: str, workspace_name: str, peering_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { @@ -147,7 +147,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -160,7 +160,7 @@ async def begin_delete( resource_group_name: str, workspace_name: str, peering_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes the workspace vNetPeering. @@ -172,8 +172,8 @@ async def begin_delete( :type peering_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -230,7 +230,7 @@ async def _create_or_update_initial( workspace_name: str, peering_name: str, virtual_network_peering_parameters: "_models.VirtualNetworkPeering", - **kwargs + **kwargs: Any ) -> "_models.VirtualNetworkPeering": cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"] error_map = { @@ -269,7 +269,7 @@ async def _create_or_update_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -290,7 +290,7 @@ async def begin_create_or_update( workspace_name: str, peering_name: str, virtual_network_peering_parameters: "_models.VirtualNetworkPeering", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.VirtualNetworkPeering"]: """Creates vNet Peering for workspace. @@ -302,15 +302,15 @@ async def begin_create_or_update( :type peering_name: str :param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet Peering. - :type virtual_network_peering_parameters: ~azure.mgmt.databricks.models.VirtualNetworkPeering + :type virtual_network_peering_parameters: ~azure_databricks_management_client.models.VirtualNetworkPeering :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either VirtualNetworkPeering or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_databricks_management_client.models.VirtualNetworkPeering] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -365,7 +365,7 @@ def list_by_workspace( self, resource_group_name: str, workspace_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.VirtualNetworkPeeringList"]: """Lists the workspace vNet Peerings. 
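# ---------------------------------------------------------------------------
# Editor's note, not part of the generated diff: a sketch of driving the async
# begin_create_or_update long-running operation documented above and then
# paging list_by_workspace. It assumes `client` is an
# azure.mgmt.databricks.aio.AzureDatabricksManagementClient and that
# VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork accepts the remote
# VNet resource id via an `id` keyword; resource names are placeholders.
from azure.mgmt.databricks.models import (
    VirtualNetworkPeering,
    VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork,
)


async def peer_workspace_vnet(client, remote_vnet_id: str) -> None:
    parameters = VirtualNetworkPeering(
        remote_virtual_network=VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork(
            id=remote_vnet_id,
        ),
    )

    # begin_create_or_update returns an AsyncLROPoller; by default it polls
    # with AsyncARMPolling, and polling=False skips polling entirely.
    poller = await client.vnet_peering.begin_create_or_update(
        "my-rg", "my-workspace", "my-peering", parameters
    )
    peering = await poller.result()
    print(peering.peering_state, peering.provisioning_state)

    # list_by_workspace pages through VirtualNetworkPeeringList results.
    async for existing in client.vnet_peering.list_by_workspace("my-rg", "my-workspace"):
        print(existing.peering_state)
# ---------------------------------------------------------------------------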
@@ -375,7 +375,7 @@ def list_by_workspace( :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either VirtualNetworkPeeringList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.VirtualNetworkPeeringList] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.VirtualNetworkPeeringList] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeeringList"] @@ -425,7 +425,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py index b9e02fa3ae05..a95a73ba1a34 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py @@ -28,7 +28,7 @@ class WorkspacesOperations: instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databricks.models + :type models: ~azure_databricks_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -47,7 +47,7 @@ async def get( self, resource_group_name: str, workspace_name: str, - **kwargs + **kwargs: Any ) -> "_models.Workspace": """Gets the workspace. 
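# ---------------------------------------------------------------------------
# Editor's note, not part of the generated diff: a sketch of calling the async
# WorkspacesOperations.get documented above and handling the mapped errors.
# With the error_map used throughout these operations a 404 surfaces as
# ResourceNotFoundError, while other non-200 responses raise HttpResponseError
# carrying the ErrorResponse model produced by failsafe_deserialize. `client`
# is assumed to be an azure.mgmt.databricks.aio.AzureDatabricksManagementClient.
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError


async def describe_workspace(client, resource_group_name: str, workspace_name: str) -> None:
    try:
        workspace = await client.workspaces.get(resource_group_name, workspace_name)
    except ResourceNotFoundError:
        print(f"{workspace_name} was not found in {resource_group_name}")
        return
    except HttpResponseError as exc:
        # exc.model holds the deserialized ErrorResponse, when the payload parses.
        print(f"request failed: {exc.message}")
        raise

    print(workspace.workspace_url, workspace.provisioning_state)
# ---------------------------------------------------------------------------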
@@ -57,7 +57,7 @@ async def get( :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: Workspace, or the result of cls(response) - :rtype: ~azure.mgmt.databricks.models.Workspace + :rtype: ~azure_databricks_management_client.models.Workspace :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] @@ -65,7 +65,7 @@ async def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" accept = "application/json" # Construct URL @@ -91,7 +91,7 @@ async def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Workspace', pipeline_response) @@ -106,14 +106,14 @@ async def _delete_initial( self, resource_group_name: str, workspace_name: str, - **kwargs + **kwargs: Any ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" accept = "application/json" # Construct URL @@ -139,7 +139,7 @@ async def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -151,7 +151,7 @@ async def begin_delete( self, resource_group_name: str, workspace_name: str, - **kwargs + **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes the workspace. @@ -161,8 +161,8 @@ async def begin_delete( :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) @@ -216,14 +216,14 @@ async def _create_or_update_initial( resource_group_name: str, workspace_name: str, parameters: "_models.Workspace", - **kwargs + **kwargs: Any ) -> "_models.Workspace": cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -254,7 +254,7 @@ async def _create_or_update_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -274,7 +274,7 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, parameters: "_models.Workspace", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.Workspace"]: """Creates a new workspace. @@ -283,15 +283,15 @@ async def begin_create_or_update( :param workspace_name: The name of the workspace. :type workspace_name: str :param parameters: Parameters supplied to the create or update a workspace. - :type parameters: ~azure.mgmt.databricks.models.Workspace + :type parameters: ~azure_databricks_management_client.models.Workspace :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace] + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_databricks_management_client.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -345,14 +345,14 @@ async def _update_initial( resource_group_name: str, workspace_name: str, parameters: "_models.WorkspaceUpdate", - **kwargs + **kwargs: Any ) -> Optional["_models.Workspace"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -383,7 +383,7 @@ async def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -401,7 +401,7 @@ async def begin_update( resource_group_name: str, workspace_name: str, parameters: "_models.WorkspaceUpdate", - **kwargs + **kwargs: Any ) -> AsyncLROPoller["_models.Workspace"]: """Updates a workspace. @@ -410,15 +410,15 @@ async def begin_update( :param workspace_name: The name of the workspace. :type workspace_name: str :param parameters: The update to the workspace. - :type parameters: ~azure.mgmt.databricks.models.WorkspaceUpdate + :type parameters: ~azure_databricks_management_client.models.WorkspaceUpdate :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be AsyncARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace] + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_databricks_management_client.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] @@ -470,7 +470,7 @@ def get_long_running_output(pipeline_response): def list_by_resource_group( self, resource_group_name: str, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.WorkspaceListResult"]: """Gets all the workspaces within a resource group. 
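# ---------------------------------------------------------------------------
# Editor's note, not part of the generated diff: a sketch of creating a
# workspace through the async begin_create_or_update / begin_update operations
# documented above, wiring in the 2021-04-01-preview additions
# (managed-services CMK encryption and public_network_access) whose models
# appear further down in this diff. Resource names, region, SKU and Key Vault
# details are placeholders.
from azure.mgmt.databricks.models import (
    EncryptionEntitiesDefinition,
    EncryptionV2,
    EncryptionV2KeyVaultProperties,
    Sku,
    Workspace,
    WorkspacePropertiesEncryption,
)


async def create_private_workspace(client) -> None:
    workspace = Workspace(
        location="westus",
        sku=Sku(name="premium"),
        managed_resource_group_id=(
            "/subscriptions/<subscription-id>/resourceGroups/my-managed-rg"
        ),
        public_network_access="Disabled",  # PublicNetworkAccess enum value
        required_nsg_rules="NoAzureDatabricksRules",
        encryption=WorkspacePropertiesEncryption(
            entities=EncryptionEntitiesDefinition(
                managed_services=EncryptionV2(
                    key_source="Microsoft.Keyvault",
                    key_vault_properties=EncryptionV2KeyVaultProperties(
                        key_vault_uri="https://my-vault.vault.azure.net/",
                        key_name="my-key",
                        key_version="<key-version>",
                    ),
                )
            )
        ),
    )

    poller = await client.workspaces.begin_create_or_update("my-rg", "my-workspace", workspace)
    created = await poller.result()
    print(created.id, created.provisioning_state)

    # The paged list operations yield Workspace models one by one.
    async for ws in client.workspaces.list_by_resource_group("my-rg"):
        print(ws.name, ws.public_network_access)
# ---------------------------------------------------------------------------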
@@ -478,7 +478,7 @@ def list_by_resource_group( :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.WorkspaceListResult] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.WorkspaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] @@ -486,7 +486,7 @@ def list_by_resource_group( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -527,7 +527,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -540,13 +540,13 @@ async def get_next(next_link=None): def list_by_subscription( self, - **kwargs + **kwargs: Any ) -> AsyncIterable["_models.WorkspaceListResult"]: """Gets all the workspaces within a subscription. :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.WorkspaceListResult] + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.WorkspaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] @@ -554,7 +554,7 @@ def list_by_subscription( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -594,7 +594,7 @@ async def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py index 4804c264bf7a..4645c40c923c 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py @@ -10,15 +10,27 @@ from ._models_py3 import AddressSpace from ._models_py3 import CreatedBy from ._models_py3 import Encryption + from ._models_py3 import EncryptionEntitiesDefinition + from ._models_py3 import EncryptionV2 + from ._models_py3 import EncryptionV2KeyVaultProperties from 
._models_py3 import ErrorDetail from ._models_py3 import ErrorInfo from ._models_py3 import ErrorResponse + from ._models_py3 import GroupIdInformation + from ._models_py3 import GroupIdInformationProperties from ._models_py3 import ManagedIdentityConfiguration from ._models_py3 import Operation from ._models_py3 import OperationDisplay from ._models_py3 import OperationListResult + from ._models_py3 import PrivateEndpoint + from ._models_py3 import PrivateEndpointConnection + from ._models_py3 import PrivateEndpointConnectionProperties + from ._models_py3 import PrivateEndpointConnectionsList + from ._models_py3 import PrivateLinkResourcesList + from ._models_py3 import PrivateLinkServiceConnectionState from ._models_py3 import Resource from ._models_py3 import Sku + from ._models_py3 import SystemData from ._models_py3 import TrackedResource from ._models_py3 import VirtualNetworkPeering from ._models_py3 import VirtualNetworkPeeringList @@ -31,21 +43,34 @@ from ._models_py3 import WorkspaceCustomStringParameter from ._models_py3 import WorkspaceEncryptionParameter from ._models_py3 import WorkspaceListResult + from ._models_py3 import WorkspacePropertiesEncryption from ._models_py3 import WorkspaceProviderAuthorization from ._models_py3 import WorkspaceUpdate except (SyntaxError, ImportError): from ._models import AddressSpace # type: ignore from ._models import CreatedBy # type: ignore from ._models import Encryption # type: ignore + from ._models import EncryptionEntitiesDefinition # type: ignore + from ._models import EncryptionV2 # type: ignore + from ._models import EncryptionV2KeyVaultProperties # type: ignore from ._models import ErrorDetail # type: ignore from ._models import ErrorInfo # type: ignore from ._models import ErrorResponse # type: ignore + from ._models import GroupIdInformation # type: ignore + from ._models import GroupIdInformationProperties # type: ignore from ._models import ManagedIdentityConfiguration # type: ignore from ._models import Operation # type: ignore from ._models import OperationDisplay # type: ignore from ._models import OperationListResult # type: ignore + from ._models import PrivateEndpoint # type: ignore + from ._models import PrivateEndpointConnection # type: ignore + from ._models import PrivateEndpointConnectionProperties # type: ignore + from ._models import PrivateEndpointConnectionsList # type: ignore + from ._models import PrivateLinkResourcesList # type: ignore + from ._models import PrivateLinkServiceConnectionState # type: ignore from ._models import Resource # type: ignore from ._models import Sku # type: ignore + from ._models import SystemData # type: ignore from ._models import TrackedResource # type: ignore from ._models import VirtualNetworkPeering # type: ignore from ._models import VirtualNetworkPeeringList # type: ignore @@ -58,30 +83,49 @@ from ._models import WorkspaceCustomStringParameter # type: ignore from ._models import WorkspaceEncryptionParameter # type: ignore from ._models import WorkspaceListResult # type: ignore + from ._models import WorkspacePropertiesEncryption # type: ignore from ._models import WorkspaceProviderAuthorization # type: ignore from ._models import WorkspaceUpdate # type: ignore -from ._databricks_client_enums import ( +from ._azure_databricks_management_client_enums import ( + CreatedByType, CustomParameterType, + EncryptionKeySource, KeySource, PeeringProvisioningState, PeeringState, + PrivateEndpointConnectionProvisioningState, + PrivateLinkServiceConnectionStatus, ProvisioningState, + 
PublicNetworkAccess, + RequiredNsgRules, ) __all__ = [ 'AddressSpace', 'CreatedBy', 'Encryption', + 'EncryptionEntitiesDefinition', + 'EncryptionV2', + 'EncryptionV2KeyVaultProperties', 'ErrorDetail', 'ErrorInfo', 'ErrorResponse', + 'GroupIdInformation', + 'GroupIdInformationProperties', 'ManagedIdentityConfiguration', 'Operation', 'OperationDisplay', 'OperationListResult', + 'PrivateEndpoint', + 'PrivateEndpointConnection', + 'PrivateEndpointConnectionProperties', + 'PrivateEndpointConnectionsList', + 'PrivateLinkResourcesList', + 'PrivateLinkServiceConnectionState', 'Resource', 'Sku', + 'SystemData', 'TrackedResource', 'VirtualNetworkPeering', 'VirtualNetworkPeeringList', @@ -94,11 +138,18 @@ 'WorkspaceCustomStringParameter', 'WorkspaceEncryptionParameter', 'WorkspaceListResult', + 'WorkspacePropertiesEncryption', 'WorkspaceProviderAuthorization', 'WorkspaceUpdate', + 'CreatedByType', 'CustomParameterType', + 'EncryptionKeySource', 'KeySource', 'PeeringProvisioningState', 'PeeringState', + 'PrivateEndpointConnectionProvisioningState', + 'PrivateLinkServiceConnectionStatus', 'ProvisioningState', + 'PublicNetworkAccess', + 'RequiredNsgRules', ] diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_databricks_client_enums.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py similarity index 58% rename from sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_databricks_client_enums.py rename to sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py index 763ebf60815d..a0ccab790f46 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_databricks_client_enums.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py @@ -26,6 +26,15 @@ def __getattr__(cls, name): raise AttributeError(name) +class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of identity that created the resource. + """ + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + class CustomParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Provisioning status of the workspace. """ @@ -34,6 +43,12 @@ class CustomParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): OBJECT = "Object" STRING = "String" +class EncryptionKeySource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The encryption keySource (provider). Possible values (case-insensitive): Microsoft.Keyvault + """ + + MICROSOFT_KEYVAULT = "Microsoft.Keyvault" + class KeySource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The encryption keySource (provider). Possible values (case-insensitive): Default, Microsoft.Keyvault @@ -59,6 +74,25 @@ class PeeringState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): CONNECTED = "Connected" DISCONNECTED = "Disconnected" +class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The current provisioning state. 
+ """ + + SUCCEEDED = "Succeeded" + CREATING = "Creating" + UPDATING = "Updating" + DELETING = "Deleting" + FAILED = "Failed" + +class PrivateLinkServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The status of a private endpoint connection + """ + + PENDING = "Pending" + APPROVED = "Approved" + REJECTED = "Rejected" + DISCONNECTED = "Disconnected" + class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Provisioning status of the workspace. """ @@ -74,3 +108,21 @@ class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): FAILED = "Failed" SUCCEEDED = "Succeeded" UPDATING = "Updating" + +class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The network access type for accessing workspace. Set value to disabled to access workspace only + via private link. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + +class RequiredNsgRules(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Gets or sets a value indicating whether data plane (clusters) to control plane communication + happen over private endpoint. Supported values are 'AllRules' and 'NoAzureDatabricksRules'. + 'NoAzureServiceRules' value is for internal use only. + """ + + ALL_RULES = "AllRules" + NO_AZURE_DATABRICKS_RULES = "NoAzureDatabricksRules" + NO_AZURE_SERVICE_RULES = "NoAzureServiceRules" diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models.py index 218574a66c4a..30cd1f13f340 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models.py @@ -72,7 +72,7 @@ class Encryption(msrest.serialization.Model): :param key_source: The encryption keySource (provider). Possible values (case-insensitive): Default, Microsoft.Keyvault. Possible values include: "Default", "Microsoft.Keyvault". Default value: "Default". - :type key_source: str or ~azure.mgmt.databricks.models.KeySource + :type key_source: str or ~azure_databricks_management_client.models.KeySource :param key_name: The name of KeyVault key. :type key_name: str :param key_version: The version of KeyVault key. @@ -99,6 +99,91 @@ def __init__( self.key_vault_uri = kwargs.get('key_vault_uri', None) +class EncryptionEntitiesDefinition(msrest.serialization.Model): + """Encryption entities for databricks workspace resource. + + :param managed_services: Encryption properties for the databricks managed services. + :type managed_services: ~azure_databricks_management_client.models.EncryptionV2 + """ + + _attribute_map = { + 'managed_services': {'key': 'managedServices', 'type': 'EncryptionV2'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionEntitiesDefinition, self).__init__(**kwargs) + self.managed_services = kwargs.get('managed_services', None) + + +class EncryptionV2(msrest.serialization.Model): + """The object that contains details of encryption used on the workspace. + + All required parameters must be populated in order to send to Azure. + + :param key_source: Required. The encryption keySource (provider). Possible values + (case-insensitive): Microsoft.Keyvault. Possible values include: "Microsoft.Keyvault". + :type key_source: str or ~azure_databricks_management_client.models.EncryptionKeySource + :param key_vault_properties: Key Vault input properties for encryption. 
+ :type key_vault_properties: + ~azure_databricks_management_client.models.EncryptionV2KeyVaultProperties + """ + + _validation = { + 'key_source': {'required': True}, + } + + _attribute_map = { + 'key_source': {'key': 'keySource', 'type': 'str'}, + 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'EncryptionV2KeyVaultProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionV2, self).__init__(**kwargs) + self.key_source = kwargs['key_source'] + self.key_vault_properties = kwargs.get('key_vault_properties', None) + + +class EncryptionV2KeyVaultProperties(msrest.serialization.Model): + """Key Vault input properties for encryption. + + All required parameters must be populated in order to send to Azure. + + :param key_vault_uri: Required. The Uri of KeyVault. + :type key_vault_uri: str + :param key_name: Required. The name of KeyVault key. + :type key_name: str + :param key_version: Required. The version of KeyVault key. + :type key_version: str + """ + + _validation = { + 'key_vault_uri': {'required': True}, + 'key_name': {'required': True}, + 'key_version': {'required': True}, + } + + _attribute_map = { + 'key_vault_uri': {'key': 'keyVaultUri', 'type': 'str'}, + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'key_version': {'key': 'keyVersion', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionV2KeyVaultProperties, self).__init__(**kwargs) + self.key_vault_uri = kwargs['key_vault_uri'] + self.key_name = kwargs['key_name'] + self.key_version = kwargs['key_version'] + + class ErrorDetail(msrest.serialization.Model): """Error details. @@ -143,7 +228,7 @@ class ErrorInfo(msrest.serialization.Model): :param message: Required. A human readable error message. :type message: str :param details: error details. - :type details: list[~azure.mgmt.databricks.models.ErrorDetail] + :type details: list[~azure_databricks_management_client.models.ErrorDetail] :param innererror: Inner error details if they exist. :type innererror: str """ @@ -177,7 +262,7 @@ class ErrorResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param error: Required. The error details. - :type error: ~azure.mgmt.databricks.models.ErrorInfo + :type error: ~azure_databricks_management_client.models.ErrorInfo """ _validation = { @@ -196,6 +281,111 @@ def __init__( self.error = kwargs['error'] +class Resource(msrest.serialization.Model): + """The core properties of ARM resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class GroupIdInformation(Resource): + """The group information for creating a private endpoint on a workspace. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param properties: Required. The group id properties. + :type properties: ~azure_databricks_management_client.models.GroupIdInformationProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'GroupIdInformationProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(GroupIdInformation, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class GroupIdInformationProperties(msrest.serialization.Model): + """The properties for a group information object. + + :param group_id: The group id. + :type group_id: str + :param required_members: The required members for a specific group id. + :type required_members: list[str] + :param required_zone_names: The required DNS zones for a specific group id. + :type required_zone_names: list[str] + """ + + _attribute_map = { + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(GroupIdInformationProperties, self).__init__(**kwargs) + self.group_id = kwargs.get('group_id', None) + self.required_members = kwargs.get('required_members', None) + self.required_zone_names = kwargs.get('required_zone_names', None) + + class ManagedIdentityConfiguration(msrest.serialization.Model): """The Managed Identity details for storage account. @@ -238,7 +428,7 @@ class Operation(msrest.serialization.Model): :param name: Operation name: {provider}/{resource}/{operation}. :type name: str :param display: The object that represents the operation. - :type display: ~azure.mgmt.databricks.models.OperationDisplay + :type display: ~azure_databricks_management_client.models.OperationDisplay """ _attribute_map = { @@ -287,7 +477,7 @@ class OperationListResult(msrest.serialization.Model): :param value: List of Resource Provider operations supported by the Resource Provider resource provider. - :type value: list[~azure.mgmt.databricks.models.Operation] + :type value: list[~azure_databricks_management_client.models.Operation] :param next_link: URL to get the next set of operation list results if there are any. :type next_link: str """ @@ -306,41 +496,192 @@ def __init__( self.next_link = kwargs.get('next_link', None) -class Resource(msrest.serialization.Model): - """The core properties of ARM resources. +class PrivateEndpoint(msrest.serialization.Model): + """The private endpoint property of a private endpoint connection. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: Fully qualified resource Id for the resource. 
Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :ivar id: The resource identifier. :vartype id: str - :ivar name: The name of the resource. + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None + + +class PrivateEndpointConnection(msrest.serialization.Model): + """The private endpoint connection of a workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. + :ivar type: The resource type. :vartype type: str + :param properties: Required. The private endpoint connection properties. + :type properties: + ~azure_databricks_management_client.models.PrivateEndpointConnectionProperties """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateEndpointConnectionProperties'}, } def __init__( self, **kwargs ): - super(Resource, self).__init__(**kwargs) + super(PrivateEndpointConnection, self).__init__(**kwargs) self.id = None self.name = None self.type = None + self.properties = kwargs['properties'] + + +class PrivateEndpointConnectionProperties(msrest.serialization.Model): + """The properties of a private endpoint connection. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param private_endpoint: Private endpoint. + :type private_endpoint: ~azure_databricks_management_client.models.PrivateEndpoint + :param private_link_service_connection_state: Required. Private endpoint connection state. + :type private_link_service_connection_state: + ~azure_databricks_management_client.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: Provisioning state of the private endpoint connection. Possible + values include: "Succeeded", "Creating", "Updating", "Deleting", "Failed". 
+ :vartype provisioning_state: str or + ~azure_databricks_management_client.models.PrivateEndpointConnectionProvisioningState + """ + + _validation = { + 'private_link_service_connection_state': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'private_endpoint': {'key': 'privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnectionProperties, self).__init__(**kwargs) + self.private_endpoint = kwargs.get('private_endpoint', None) + self.private_link_service_connection_state = kwargs['private_link_service_connection_state'] + self.provisioning_state = None + + +class PrivateEndpointConnectionsList(msrest.serialization.Model): + """List of private link connections. + + :param value: The list of returned private endpoint connection. + :type value: list[~azure_databricks_management_client.models.PrivateEndpointConnection] + :param next_link: The URL to get the next set of endpoint connections. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnectionsList, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class PrivateLinkResourcesList(msrest.serialization.Model): + """The available private link resources for a workspace. + + :param value: The list of available private link resources for a workspace. + :type value: list[~azure_databricks_management_client.models.GroupIdInformation] + :param next_link: The URL to get the next set of private link resources. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[GroupIdInformation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResourcesList, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """The current state of a private endpoint connection. + + All required parameters must be populated in order to send to Azure. + + :param status: Required. The status of a private endpoint connection. Possible values include: + "Pending", "Approved", "Rejected", "Disconnected". + :type status: str or + ~azure_databricks_management_client.models.PrivateLinkServiceConnectionStatus + :param description: The description for the current state of a private endpoint connection. + :type description: str + :param action_required: Actions required for a private endpoint connection. 
+ :type action_required: str + """ + + _validation = { + 'status': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'action_required': {'key': 'actionRequired', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = kwargs['status'] + self.description = kwargs.get('description', None) + self.action_required = kwargs.get('action_required', None) class Sku(msrest.serialization.Model): @@ -372,6 +713,47 @@ def __init__( self.tier = kwargs.get('tier', None) +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~azure_databricks_management_client.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param last_modified_by: The identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or ~azure_databricks_management_client.models.CreatedByType + :param last_modified_at: The timestamp of resource last modification (UTC). + :type last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = kwargs.get('created_by', None) + self.created_by_type = kwargs.get('created_by_type', None) + self.created_at = kwargs.get('created_at', None) + self.last_modified_by = kwargs.get('last_modified_by', None) + self.last_modified_by_type = kwargs.get('last_modified_by_type', None) + self.last_modified_at = kwargs.get('last_modified_at', None) + + class TrackedResource(Resource): """The resource model definition for a ARM tracked top level resource. @@ -445,25 +827,26 @@ class VirtualNetworkPeering(msrest.serialization.Model): true. This flag cannot be set if virtual network already has a gateway. :type use_remote_gateways: bool :param databricks_virtual_network: The remote virtual network should be in the same region. See - here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration- - guide/cloud-configurations/azure/vnet-peering). + here to learn more + (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). :type databricks_virtual_network: - ~azure.mgmt.databricks.models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork + ~azure_databricks_management_client.models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork :param databricks_address_space: The reference to the databricks virtual network address space. 
- :type databricks_address_space: ~azure.mgmt.databricks.models.AddressSpace + :type databricks_address_space: ~azure_databricks_management_client.models.AddressSpace :param remote_virtual_network: Required. The remote virtual network should be in the same - region. See here to learn more (https://docs.microsoft.com/en- - us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). + region. See here to learn more + (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). :type remote_virtual_network: - ~azure.mgmt.databricks.models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork + ~azure_databricks_management_client.models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork :param remote_address_space: The reference to the remote virtual network address space. - :type remote_address_space: ~azure.mgmt.databricks.models.AddressSpace + :type remote_address_space: ~azure_databricks_management_client.models.AddressSpace :ivar peering_state: The status of the virtual network peering. Possible values include: "Initiated", "Connected", "Disconnected". - :vartype peering_state: str or ~azure.mgmt.databricks.models.PeeringState + :vartype peering_state: str or ~azure_databricks_management_client.models.PeeringState :ivar provisioning_state: The provisioning state of the virtual network peering resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed". - :vartype provisioning_state: str or ~azure.mgmt.databricks.models.PeeringProvisioningState + :vartype provisioning_state: str or + ~azure_databricks_management_client.models.PeeringProvisioningState """ _validation = { @@ -515,7 +898,7 @@ class VirtualNetworkPeeringList(msrest.serialization.Model): """Gets all virtual network peerings under a workspace. :param value: List of virtual network peerings on workspace. - :type value: list[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :type value: list[~azure_databricks_management_client.models.VirtualNetworkPeering] :param next_link: URL to get the next set of virtual network peering list results if there are any. :type next_link: str @@ -593,25 +976,29 @@ class Workspace(TrackedResource): :param location: Required. The geo-location where the resource lives. :type location: str :param sku: The SKU of the resource. - :type sku: ~azure.mgmt.databricks.models.Sku + :type sku: ~azure_databricks_management_client.models.Sku + :ivar system_data: The system metadata relating to this resource. + :vartype system_data: ~azure_databricks_management_client.models.SystemData :param managed_resource_group_id: Required. The managed resource group Id. :type managed_resource_group_id: str :param parameters: The workspace's custom parameters. - :type parameters: ~azure.mgmt.databricks.models.WorkspaceCustomParameters + :type parameters: ~azure_databricks_management_client.models.WorkspaceCustomParameters :ivar provisioning_state: The workspace provisioning state. Possible values include: "Accepted", "Running", "Ready", "Creating", "Created", "Deleting", "Deleted", "Canceled", "Failed", "Succeeded", "Updating". - :vartype provisioning_state: str or ~azure.mgmt.databricks.models.ProvisioningState + :vartype provisioning_state: str or + ~azure_databricks_management_client.models.ProvisioningState :param ui_definition_uri: The blob URI where the UI definition file is located. :type ui_definition_uri: str :param authorizations: The workspace provider authorizations. 
- :type authorizations: list[~azure.mgmt.databricks.models.WorkspaceProviderAuthorization] + :type authorizations: + list[~azure_databricks_management_client.models.WorkspaceProviderAuthorization] :param created_by: Indicates the Object ID, PUID and Application ID of entity that created the workspace. - :type created_by: ~azure.mgmt.databricks.models.CreatedBy + :type created_by: ~azure_databricks_management_client.models.CreatedBy :param updated_by: Indicates the Object ID, PUID and Application ID of entity that last updated the workspace. - :type updated_by: ~azure.mgmt.databricks.models.CreatedBy + :type updated_by: ~azure_databricks_management_client.models.CreatedBy :ivar created_date_time: Specifies the date and time when the workspace is created. :vartype created_date_time: ~datetime.datetime :ivar workspace_id: The unique identifier of the databricks workspace in databricks control @@ -621,7 +1008,23 @@ class Workspace(TrackedResource): 'adb-{workspaceId}.{random}.azuredatabricks.net'. :vartype workspace_url: str :param storage_account_identity: The details of Managed Identity of Storage Account. - :type storage_account_identity: ~azure.mgmt.databricks.models.ManagedIdentityConfiguration + :type storage_account_identity: + ~azure_databricks_management_client.models.ManagedIdentityConfiguration + :param encryption: Encryption properties for databricks workspace. + :type encryption: ~azure_databricks_management_client.models.WorkspacePropertiesEncryption + :ivar private_endpoint_connections: Private endpoint connections created on the workspace. + :vartype private_endpoint_connections: + list[~azure_databricks_management_client.models.PrivateEndpointConnection] + :param public_network_access: The network access type for accessing workspace. Set value to + disabled to access workspace only via private link. Possible values include: "Enabled", + "Disabled". + :type public_network_access: str or + ~azure_databricks_management_client.models.PublicNetworkAccess + :param required_nsg_rules: Gets or sets a value indicating whether data plane (clusters) to + control plane communication happen over private endpoint. Supported values are 'AllRules' and + 'NoAzureDatabricksRules'. 'NoAzureServiceRules' value is for internal use only. Possible values + include: "AllRules", "NoAzureDatabricksRules", "NoAzureServiceRules". 
+ :type required_nsg_rules: str or ~azure_databricks_management_client.models.RequiredNsgRules """ _validation = { @@ -629,11 +1032,13 @@ class Workspace(TrackedResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'location': {'required': True}, + 'system_data': {'readonly': True}, 'managed_resource_group_id': {'required': True}, 'provisioning_state': {'readonly': True}, 'created_date_time': {'readonly': True}, 'workspace_id': {'readonly': True}, 'workspace_url': {'readonly': True}, + 'private_endpoint_connections': {'readonly': True}, } _attribute_map = { @@ -643,6 +1048,7 @@ class Workspace(TrackedResource): 'tags': {'key': 'tags', 'type': '{str}'}, 'location': {'key': 'location', 'type': 'str'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'managed_resource_group_id': {'key': 'properties.managedResourceGroupId', 'type': 'str'}, 'parameters': {'key': 'properties.parameters', 'type': 'WorkspaceCustomParameters'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, @@ -654,6 +1060,10 @@ class Workspace(TrackedResource): 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, 'workspace_url': {'key': 'properties.workspaceUrl', 'type': 'str'}, 'storage_account_identity': {'key': 'properties.storageAccountIdentity', 'type': 'ManagedIdentityConfiguration'}, + 'encryption': {'key': 'properties.encryption', 'type': 'WorkspacePropertiesEncryption'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + 'required_nsg_rules': {'key': 'properties.requiredNsgRules', 'type': 'str'}, } def __init__( @@ -662,6 +1072,7 @@ def __init__( ): super(Workspace, self).__init__(**kwargs) self.sku = kwargs.get('sku', None) + self.system_data = None self.managed_resource_group_id = kwargs['managed_resource_group_id'] self.parameters = kwargs.get('parameters', None) self.provisioning_state = None @@ -673,6 +1084,10 @@ def __init__( self.workspace_id = None self.workspace_url = None self.storage_account_identity = kwargs.get('storage_account_identity', None) + self.encryption = kwargs.get('encryption', None) + self.private_endpoint_connections = None + self.public_network_access = kwargs.get('public_network_access', None) + self.required_nsg_rules = kwargs.get('required_nsg_rules', None) class WorkspaceCustomBooleanParameter(msrest.serialization.Model): @@ -684,7 +1099,7 @@ class WorkspaceCustomBooleanParameter(msrest.serialization.Model): :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", "String". - :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType :param value: Required. The value which should be used for this field. :type value: bool """ @@ -717,9 +1132,9 @@ class WorkspaceCustomObjectParameter(msrest.serialization.Model): :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", "String". - :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType :param value: Required. The value which should be used for this field. 
- :type value: object + :type value: any """ _validation = { @@ -744,40 +1159,89 @@ def __init__( class WorkspaceCustomParameters(msrest.serialization.Model): """Custom Parameters used for Cluster Creation. + Variables are only populated by the server, and will be ignored when sending a request. + :param aml_workspace_id: The ID of a Azure Machine Learning workspace to link with Databricks workspace. - :type aml_workspace_id: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :type aml_workspace_id: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter :param custom_virtual_network_id: The ID of a Virtual Network where this Databricks Cluster should be created. - :type custom_virtual_network_id: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :type custom_virtual_network_id: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter :param custom_public_subnet_name: The name of a Public Subnet within the Virtual Network. - :type custom_public_subnet_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :type custom_public_subnet_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter :param custom_private_subnet_name: The name of the Private Subnet within the Virtual Network. - :type custom_private_subnet_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :type custom_private_subnet_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter :param enable_no_public_ip: Should the Public IP be Disabled?. - :type enable_no_public_ip: ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + :type enable_no_public_ip: + ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter + :param load_balancer_backend_pool_name: Name of the outbound Load Balancer Backend Pool for + Secure Cluster Connectivity (No Public IP). + :type load_balancer_backend_pool_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :param load_balancer_id: Resource URI of Outbound Load balancer for Secure Cluster Connectivity + (No Public IP) workspace. + :type load_balancer_id: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :param nat_gateway_name: Name of the NAT gateway for Secure Cluster Connectivity (No Public IP) + workspace subnets. + :type nat_gateway_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :param public_ip_name: Name of the Public IP for No Public IP workspace with managed vNet. + :type public_ip_name: ~azure_databricks_management_client.models.WorkspaceCustomStringParameter :param prepare_encryption: Prepare the workspace for encryption. Enables the Managed Identity for managed storage account. - :type prepare_encryption: ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + :type prepare_encryption: + ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter :param encryption: Contains the encryption details for Customer-Managed Key (CMK) enabled workspace. - :type encryption: ~azure.mgmt.databricks.models.WorkspaceEncryptionParameter + :type encryption: ~azure_databricks_management_client.models.WorkspaceEncryptionParameter :param require_infrastructure_encryption: A boolean indicating whether or not the DBFS root file system will be enabled with secondary layer of encryption with platform managed keys for data at rest. 
:type require_infrastructure_encryption: - ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter + :param storage_account_name: Default DBFS storage account name. + :type storage_account_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :param storage_account_sku_name: Storage account SKU name, ex: Standard_GRS, Standard_LRS. + Refer https://aka.ms/storageskus for valid inputs. + :type storage_account_sku_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :param vnet_address_prefix: Address prefix for Managed virtual network. Default value for this + input is 10.139. + :type vnet_address_prefix: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :ivar resource_tags: Tags applied to resources under Managed resource group. These can be + updated by updating tags at workspace level. + :vartype resource_tags: + ~azure_databricks_management_client.models.WorkspaceCustomObjectParameter """ + _validation = { + 'resource_tags': {'readonly': True}, + } + _attribute_map = { 'aml_workspace_id': {'key': 'amlWorkspaceId', 'type': 'WorkspaceCustomStringParameter'}, 'custom_virtual_network_id': {'key': 'customVirtualNetworkId', 'type': 'WorkspaceCustomStringParameter'}, 'custom_public_subnet_name': {'key': 'customPublicSubnetName', 'type': 'WorkspaceCustomStringParameter'}, 'custom_private_subnet_name': {'key': 'customPrivateSubnetName', 'type': 'WorkspaceCustomStringParameter'}, 'enable_no_public_ip': {'key': 'enableNoPublicIp', 'type': 'WorkspaceCustomBooleanParameter'}, + 'load_balancer_backend_pool_name': {'key': 'loadBalancerBackendPoolName', 'type': 'WorkspaceCustomStringParameter'}, + 'load_balancer_id': {'key': 'loadBalancerId', 'type': 'WorkspaceCustomStringParameter'}, + 'nat_gateway_name': {'key': 'natGatewayName', 'type': 'WorkspaceCustomStringParameter'}, + 'public_ip_name': {'key': 'publicIpName', 'type': 'WorkspaceCustomStringParameter'}, 'prepare_encryption': {'key': 'prepareEncryption', 'type': 'WorkspaceCustomBooleanParameter'}, 'encryption': {'key': 'encryption', 'type': 'WorkspaceEncryptionParameter'}, 'require_infrastructure_encryption': {'key': 'requireInfrastructureEncryption', 'type': 'WorkspaceCustomBooleanParameter'}, + 'storage_account_name': {'key': 'storageAccountName', 'type': 'WorkspaceCustomStringParameter'}, + 'storage_account_sku_name': {'key': 'storageAccountSkuName', 'type': 'WorkspaceCustomStringParameter'}, + 'vnet_address_prefix': {'key': 'vnetAddressPrefix', 'type': 'WorkspaceCustomStringParameter'}, + 'resource_tags': {'key': 'resourceTags', 'type': 'WorkspaceCustomObjectParameter'}, } def __init__( @@ -790,9 +1254,17 @@ def __init__( self.custom_public_subnet_name = kwargs.get('custom_public_subnet_name', None) self.custom_private_subnet_name = kwargs.get('custom_private_subnet_name', None) self.enable_no_public_ip = kwargs.get('enable_no_public_ip', None) + self.load_balancer_backend_pool_name = kwargs.get('load_balancer_backend_pool_name', None) + self.load_balancer_id = kwargs.get('load_balancer_id', None) + self.nat_gateway_name = kwargs.get('nat_gateway_name', None) + self.public_ip_name = kwargs.get('public_ip_name', None) self.prepare_encryption = kwargs.get('prepare_encryption', None) self.encryption = kwargs.get('encryption', None) self.require_infrastructure_encryption = kwargs.get('require_infrastructure_encryption', None) + self.storage_account_name = 
kwargs.get('storage_account_name', None) + self.storage_account_sku_name = kwargs.get('storage_account_sku_name', None) + self.vnet_address_prefix = kwargs.get('vnet_address_prefix', None) + self.resource_tags = None class WorkspaceCustomStringParameter(msrest.serialization.Model): @@ -804,7 +1276,7 @@ class WorkspaceCustomStringParameter(msrest.serialization.Model): :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", "String". - :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType :param value: Required. The value which should be used for this field. :type value: str """ @@ -835,9 +1307,9 @@ class WorkspaceEncryptionParameter(msrest.serialization.Model): :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", "String". - :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType :param value: The value which should be used for this field. - :type value: ~azure.mgmt.databricks.models.Encryption + :type value: ~azure_databricks_management_client.models.Encryption """ _validation = { @@ -862,7 +1334,7 @@ class WorkspaceListResult(msrest.serialization.Model): """List of workspaces. :param value: The array of workspaces. - :type value: list[~azure.mgmt.databricks.models.Workspace] + :type value: list[~azure_databricks_management_client.models.Workspace] :param next_link: The URL to use for getting the next set of results. :type next_link: str """ @@ -881,6 +1353,31 @@ def __init__( self.next_link = kwargs.get('next_link', None) +class WorkspacePropertiesEncryption(msrest.serialization.Model): + """Encryption properties for databricks workspace. + + All required parameters must be populated in order to send to Azure. + + :param entities: Required. Encryption entities definition for the workspace. + :type entities: ~azure_databricks_management_client.models.EncryptionEntitiesDefinition + """ + + _validation = { + 'entities': {'required': True}, + } + + _attribute_map = { + 'entities': {'key': 'entities', 'type': 'EncryptionEntitiesDefinition'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspacePropertiesEncryption, self).__init__(**kwargs) + self.entities = kwargs['entities'] + + class WorkspaceProviderAuthorization(msrest.serialization.Model): """The workspace provider authorization. diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py index 95b0b518d8ce..e8e7c355d599 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py @@ -6,12 +6,13 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Dict, List, Optional, Union +import datetime +from typing import Any, Dict, List, Optional, Union from azure.core.exceptions import HttpResponseError import msrest.serialization -from ._databricks_client_enums import * +from ._azure_databricks_management_client_enums import * class AddressSpace(msrest.serialization.Model): @@ -78,7 +79,7 @@ class Encryption(msrest.serialization.Model): :param key_source: The encryption keySource (provider). 
Possible values (case-insensitive): Default, Microsoft.Keyvault. Possible values include: "Default", "Microsoft.Keyvault". Default value: "Default". - :type key_source: str or ~azure.mgmt.databricks.models.KeySource + :type key_source: str or ~azure_databricks_management_client.models.KeySource :param key_name: The name of KeyVault key. :type key_name: str :param key_version: The version of KeyVault key. @@ -110,6 +111,100 @@ def __init__( self.key_vault_uri = key_vault_uri +class EncryptionEntitiesDefinition(msrest.serialization.Model): + """Encryption entities for databricks workspace resource. + + :param managed_services: Encryption properties for the databricks managed services. + :type managed_services: ~azure_databricks_management_client.models.EncryptionV2 + """ + + _attribute_map = { + 'managed_services': {'key': 'managedServices', 'type': 'EncryptionV2'}, + } + + def __init__( + self, + *, + managed_services: Optional["EncryptionV2"] = None, + **kwargs + ): + super(EncryptionEntitiesDefinition, self).__init__(**kwargs) + self.managed_services = managed_services + + +class EncryptionV2(msrest.serialization.Model): + """The object that contains details of encryption used on the workspace. + + All required parameters must be populated in order to send to Azure. + + :param key_source: Required. The encryption keySource (provider). Possible values + (case-insensitive): Microsoft.Keyvault. Possible values include: "Microsoft.Keyvault". + :type key_source: str or ~azure_databricks_management_client.models.EncryptionKeySource + :param key_vault_properties: Key Vault input properties for encryption. + :type key_vault_properties: + ~azure_databricks_management_client.models.EncryptionV2KeyVaultProperties + """ + + _validation = { + 'key_source': {'required': True}, + } + + _attribute_map = { + 'key_source': {'key': 'keySource', 'type': 'str'}, + 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'EncryptionV2KeyVaultProperties'}, + } + + def __init__( + self, + *, + key_source: Union[str, "EncryptionKeySource"], + key_vault_properties: Optional["EncryptionV2KeyVaultProperties"] = None, + **kwargs + ): + super(EncryptionV2, self).__init__(**kwargs) + self.key_source = key_source + self.key_vault_properties = key_vault_properties + + +class EncryptionV2KeyVaultProperties(msrest.serialization.Model): + """Key Vault input properties for encryption. + + All required parameters must be populated in order to send to Azure. + + :param key_vault_uri: Required. The Uri of KeyVault. + :type key_vault_uri: str + :param key_name: Required. The name of KeyVault key. + :type key_name: str + :param key_version: Required. The version of KeyVault key. + :type key_version: str + """ + + _validation = { + 'key_vault_uri': {'required': True}, + 'key_name': {'required': True}, + 'key_version': {'required': True}, + } + + _attribute_map = { + 'key_vault_uri': {'key': 'keyVaultUri', 'type': 'str'}, + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'key_version': {'key': 'keyVersion', 'type': 'str'}, + } + + def __init__( + self, + *, + key_vault_uri: str, + key_name: str, + key_version: str, + **kwargs + ): + super(EncryptionV2KeyVaultProperties, self).__init__(**kwargs) + self.key_vault_uri = key_vault_uri + self.key_name = key_name + self.key_version = key_version + + class ErrorDetail(msrest.serialization.Model): """Error details. @@ -158,7 +253,7 @@ class ErrorInfo(msrest.serialization.Model): :param message: Required. A human readable error message. 
:type message: str :param details: error details. - :type details: list[~azure.mgmt.databricks.models.ErrorDetail] + :type details: list[~azure_databricks_management_client.models.ErrorDetail] :param innererror: Inner error details if they exist. :type innererror: str """ @@ -197,7 +292,7 @@ class ErrorResponse(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. :param error: Required. The error details. - :type error: ~azure.mgmt.databricks.models.ErrorInfo + :type error: ~azure_databricks_management_client.models.ErrorInfo """ _validation = { @@ -218,6 +313,117 @@ def __init__( self.error = error +class Resource(msrest.serialization.Model): + """The core properties of ARM resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + + +class GroupIdInformation(Resource): + """The group information for creating a private endpoint on a workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. + :vartype type: str + :param properties: Required. The group id properties. + :type properties: ~azure_databricks_management_client.models.GroupIdInformationProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'GroupIdInformationProperties'}, + } + + def __init__( + self, + *, + properties: "GroupIdInformationProperties", + **kwargs + ): + super(GroupIdInformation, self).__init__(**kwargs) + self.properties = properties + + +class GroupIdInformationProperties(msrest.serialization.Model): + """The properties for a group information object. + + :param group_id: The group id. + :type group_id: str + :param required_members: The required members for a specific group id. + :type required_members: list[str] + :param required_zone_names: The required DNS zones for a specific group id. 
+ :type required_zone_names: list[str] + """ + + _attribute_map = { + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + *, + group_id: Optional[str] = None, + required_members: Optional[List[str]] = None, + required_zone_names: Optional[List[str]] = None, + **kwargs + ): + super(GroupIdInformationProperties, self).__init__(**kwargs) + self.group_id = group_id + self.required_members = required_members + self.required_zone_names = required_zone_names + + class ManagedIdentityConfiguration(msrest.serialization.Model): """The Managed Identity details for storage account. @@ -260,7 +466,7 @@ class Operation(msrest.serialization.Model): :param name: Operation name: {provider}/{resource}/{operation}. :type name: str :param display: The object that represents the operation. - :type display: ~azure.mgmt.databricks.models.OperationDisplay + :type display: ~azure_databricks_management_client.models.OperationDisplay """ _attribute_map = { @@ -316,7 +522,7 @@ class OperationListResult(msrest.serialization.Model): :param value: List of Resource Provider operations supported by the Resource Provider resource provider. - :type value: list[~azure.mgmt.databricks.models.Operation] + :type value: list[~azure_databricks_management_client.models.Operation] :param next_link: URL to get the next set of operation list results if there are any. :type next_link: str """ @@ -338,41 +544,207 @@ def __init__( self.next_link = next_link -class Resource(msrest.serialization.Model): - """The core properties of ARM resources. +class PrivateEndpoint(msrest.serialization.Model): + """The private endpoint property of a private endpoint connection. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :ivar id: The resource identifier. :vartype id: str - :ivar name: The name of the resource. + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None + + +class PrivateEndpointConnection(msrest.serialization.Model): + """The private endpoint connection of a workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. + :ivar type: The resource type. :vartype type: str + :param properties: Required. The private endpoint connection properties. 
+ :type properties: + ~azure_databricks_management_client.models.PrivateEndpointConnectionProperties """ _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateEndpointConnectionProperties'}, } def __init__( self, + *, + properties: "PrivateEndpointConnectionProperties", **kwargs ): - super(Resource, self).__init__(**kwargs) + super(PrivateEndpointConnection, self).__init__(**kwargs) self.id = None self.name = None self.type = None + self.properties = properties + + +class PrivateEndpointConnectionProperties(msrest.serialization.Model): + """The properties of a private endpoint connection. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param private_endpoint: Private endpoint. + :type private_endpoint: ~azure_databricks_management_client.models.PrivateEndpoint + :param private_link_service_connection_state: Required. Private endpoint connection state. + :type private_link_service_connection_state: + ~azure_databricks_management_client.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: Provisioning state of the private endpoint connection. Possible + values include: "Succeeded", "Creating", "Updating", "Deleting", "Failed". + :vartype provisioning_state: str or + ~azure_databricks_management_client.models.PrivateEndpointConnectionProvisioningState + """ + + _validation = { + 'private_link_service_connection_state': {'required': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'private_endpoint': {'key': 'privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + private_link_service_connection_state: "PrivateLinkServiceConnectionState", + private_endpoint: Optional["PrivateEndpoint"] = None, + **kwargs + ): + super(PrivateEndpointConnectionProperties, self).__init__(**kwargs) + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + self.provisioning_state = None + + +class PrivateEndpointConnectionsList(msrest.serialization.Model): + """List of private link connections. + + :param value: The list of returned private endpoint connection. + :type value: list[~azure_databricks_management_client.models.PrivateEndpointConnection] + :param next_link: The URL to get the next set of endpoint connections. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["PrivateEndpointConnection"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(PrivateEndpointConnectionsList, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class PrivateLinkResourcesList(msrest.serialization.Model): + """The available private link resources for a workspace. + + :param value: The list of available private link resources for a workspace. 
+ :type value: list[~azure_databricks_management_client.models.GroupIdInformation] + :param next_link: The URL to get the next set of private link resources. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[GroupIdInformation]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["GroupIdInformation"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(PrivateLinkResourcesList, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """The current state of a private endpoint connection. + + All required parameters must be populated in order to send to Azure. + + :param status: Required. The status of a private endpoint connection. Possible values include: + "Pending", "Approved", "Rejected", "Disconnected". + :type status: str or + ~azure_databricks_management_client.models.PrivateLinkServiceConnectionStatus + :param description: The description for the current state of a private endpoint connection. + :type description: str + :param action_required: Actions required for a private endpoint connection. + :type action_required: str + """ + + _validation = { + 'status': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'action_required': {'key': 'actionRequired', 'type': 'str'}, + } + + def __init__( + self, + *, + status: Union[str, "PrivateLinkServiceConnectionStatus"], + description: Optional[str] = None, + action_required: Optional[str] = None, + **kwargs + ): + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = status + self.description = description + self.action_required = action_required class Sku(msrest.serialization.Model): @@ -407,6 +779,54 @@ def __init__( self.tier = tier +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~azure_databricks_management_client.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param last_modified_by: The identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or ~azure_databricks_management_client.models.CreatedByType + :param last_modified_at: The timestamp of resource last modification (UTC). 
+ :type last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at + + class TrackedResource(Resource): """The resource model definition for a ARM tracked top level resource. @@ -483,25 +903,26 @@ class VirtualNetworkPeering(msrest.serialization.Model): true. This flag cannot be set if virtual network already has a gateway. :type use_remote_gateways: bool :param databricks_virtual_network: The remote virtual network should be in the same region. See - here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration- - guide/cloud-configurations/azure/vnet-peering). + here to learn more + (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). :type databricks_virtual_network: - ~azure.mgmt.databricks.models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork + ~azure_databricks_management_client.models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork :param databricks_address_space: The reference to the databricks virtual network address space. - :type databricks_address_space: ~azure.mgmt.databricks.models.AddressSpace + :type databricks_address_space: ~azure_databricks_management_client.models.AddressSpace :param remote_virtual_network: Required. The remote virtual network should be in the same - region. See here to learn more (https://docs.microsoft.com/en- - us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). + region. See here to learn more + (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). :type remote_virtual_network: - ~azure.mgmt.databricks.models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork + ~azure_databricks_management_client.models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork :param remote_address_space: The reference to the remote virtual network address space. - :type remote_address_space: ~azure.mgmt.databricks.models.AddressSpace + :type remote_address_space: ~azure_databricks_management_client.models.AddressSpace :ivar peering_state: The status of the virtual network peering. Possible values include: "Initiated", "Connected", "Disconnected". - :vartype peering_state: str or ~azure.mgmt.databricks.models.PeeringState + :vartype peering_state: str or ~azure_databricks_management_client.models.PeeringState :ivar provisioning_state: The provisioning state of the virtual network peering resource. 
Possible values include: "Succeeded", "Updating", "Deleting", "Failed". - :vartype provisioning_state: str or ~azure.mgmt.databricks.models.PeeringProvisioningState + :vartype provisioning_state: str or + ~azure_databricks_management_client.models.PeeringProvisioningState """ _validation = { @@ -562,7 +983,7 @@ class VirtualNetworkPeeringList(msrest.serialization.Model): """Gets all virtual network peerings under a workspace. :param value: List of virtual network peerings on workspace. - :type value: list[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :type value: list[~azure_databricks_management_client.models.VirtualNetworkPeering] :param next_link: URL to get the next set of virtual network peering list results if there are any. :type next_link: str @@ -647,25 +1068,29 @@ class Workspace(TrackedResource): :param location: Required. The geo-location where the resource lives. :type location: str :param sku: The SKU of the resource. - :type sku: ~azure.mgmt.databricks.models.Sku + :type sku: ~azure_databricks_management_client.models.Sku + :ivar system_data: The system metadata relating to this resource. + :vartype system_data: ~azure_databricks_management_client.models.SystemData :param managed_resource_group_id: Required. The managed resource group Id. :type managed_resource_group_id: str :param parameters: The workspace's custom parameters. - :type parameters: ~azure.mgmt.databricks.models.WorkspaceCustomParameters + :type parameters: ~azure_databricks_management_client.models.WorkspaceCustomParameters :ivar provisioning_state: The workspace provisioning state. Possible values include: "Accepted", "Running", "Ready", "Creating", "Created", "Deleting", "Deleted", "Canceled", "Failed", "Succeeded", "Updating". - :vartype provisioning_state: str or ~azure.mgmt.databricks.models.ProvisioningState + :vartype provisioning_state: str or + ~azure_databricks_management_client.models.ProvisioningState :param ui_definition_uri: The blob URI where the UI definition file is located. :type ui_definition_uri: str :param authorizations: The workspace provider authorizations. - :type authorizations: list[~azure.mgmt.databricks.models.WorkspaceProviderAuthorization] + :type authorizations: + list[~azure_databricks_management_client.models.WorkspaceProviderAuthorization] :param created_by: Indicates the Object ID, PUID and Application ID of entity that created the workspace. - :type created_by: ~azure.mgmt.databricks.models.CreatedBy + :type created_by: ~azure_databricks_management_client.models.CreatedBy :param updated_by: Indicates the Object ID, PUID and Application ID of entity that last updated the workspace. - :type updated_by: ~azure.mgmt.databricks.models.CreatedBy + :type updated_by: ~azure_databricks_management_client.models.CreatedBy :ivar created_date_time: Specifies the date and time when the workspace is created. :vartype created_date_time: ~datetime.datetime :ivar workspace_id: The unique identifier of the databricks workspace in databricks control @@ -675,7 +1100,23 @@ class Workspace(TrackedResource): 'adb-{workspaceId}.{random}.azuredatabricks.net'. :vartype workspace_url: str :param storage_account_identity: The details of Managed Identity of Storage Account. - :type storage_account_identity: ~azure.mgmt.databricks.models.ManagedIdentityConfiguration + :type storage_account_identity: + ~azure_databricks_management_client.models.ManagedIdentityConfiguration + :param encryption: Encryption properties for databricks workspace. 
+ :type encryption: ~azure_databricks_management_client.models.WorkspacePropertiesEncryption + :ivar private_endpoint_connections: Private endpoint connections created on the workspace. + :vartype private_endpoint_connections: + list[~azure_databricks_management_client.models.PrivateEndpointConnection] + :param public_network_access: The network access type for accessing workspace. Set value to + disabled to access workspace only via private link. Possible values include: "Enabled", + "Disabled". + :type public_network_access: str or + ~azure_databricks_management_client.models.PublicNetworkAccess + :param required_nsg_rules: Gets or sets a value indicating whether data plane (clusters) to + control plane communication happen over private endpoint. Supported values are 'AllRules' and + 'NoAzureDatabricksRules'. 'NoAzureServiceRules' value is for internal use only. Possible values + include: "AllRules", "NoAzureDatabricksRules", "NoAzureServiceRules". + :type required_nsg_rules: str or ~azure_databricks_management_client.models.RequiredNsgRules """ _validation = { @@ -683,11 +1124,13 @@ class Workspace(TrackedResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'location': {'required': True}, + 'system_data': {'readonly': True}, 'managed_resource_group_id': {'required': True}, 'provisioning_state': {'readonly': True}, 'created_date_time': {'readonly': True}, 'workspace_id': {'readonly': True}, 'workspace_url': {'readonly': True}, + 'private_endpoint_connections': {'readonly': True}, } _attribute_map = { @@ -697,6 +1140,7 @@ class Workspace(TrackedResource): 'tags': {'key': 'tags', 'type': '{str}'}, 'location': {'key': 'location', 'type': 'str'}, 'sku': {'key': 'sku', 'type': 'Sku'}, + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, 'managed_resource_group_id': {'key': 'properties.managedResourceGroupId', 'type': 'str'}, 'parameters': {'key': 'properties.parameters', 'type': 'WorkspaceCustomParameters'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, @@ -708,6 +1152,10 @@ class Workspace(TrackedResource): 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, 'workspace_url': {'key': 'properties.workspaceUrl', 'type': 'str'}, 'storage_account_identity': {'key': 'properties.storageAccountIdentity', 'type': 'ManagedIdentityConfiguration'}, + 'encryption': {'key': 'properties.encryption', 'type': 'WorkspacePropertiesEncryption'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, + 'required_nsg_rules': {'key': 'properties.requiredNsgRules', 'type': 'str'}, } def __init__( @@ -723,10 +1171,14 @@ def __init__( created_by: Optional["CreatedBy"] = None, updated_by: Optional["CreatedBy"] = None, storage_account_identity: Optional["ManagedIdentityConfiguration"] = None, + encryption: Optional["WorkspacePropertiesEncryption"] = None, + public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, + required_nsg_rules: Optional[Union[str, "RequiredNsgRules"]] = None, **kwargs ): super(Workspace, self).__init__(tags=tags, location=location, **kwargs) self.sku = sku + self.system_data = None self.managed_resource_group_id = managed_resource_group_id self.parameters = parameters self.provisioning_state = None @@ -738,6 +1190,10 @@ def __init__( self.workspace_id = None self.workspace_url = None self.storage_account_identity = storage_account_identity + self.encryption = 
encryption + self.private_endpoint_connections = None + self.public_network_access = public_network_access + self.required_nsg_rules = required_nsg_rules class WorkspaceCustomBooleanParameter(msrest.serialization.Model): @@ -749,7 +1205,7 @@ class WorkspaceCustomBooleanParameter(msrest.serialization.Model): :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", "String". - :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType :param value: Required. The value which should be used for this field. :type value: bool """ @@ -784,9 +1240,9 @@ class WorkspaceCustomObjectParameter(msrest.serialization.Model): :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", "String". - :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType :param value: Required. The value which should be used for this field. - :type value: object + :type value: any """ _validation = { @@ -802,7 +1258,7 @@ class WorkspaceCustomObjectParameter(msrest.serialization.Model): def __init__( self, *, - value: object, + value: Any, **kwargs ): super(WorkspaceCustomObjectParameter, self).__init__(**kwargs) @@ -813,40 +1269,89 @@ def __init__( class WorkspaceCustomParameters(msrest.serialization.Model): """Custom Parameters used for Cluster Creation. + Variables are only populated by the server, and will be ignored when sending a request. + :param aml_workspace_id: The ID of a Azure Machine Learning workspace to link with Databricks workspace. - :type aml_workspace_id: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :type aml_workspace_id: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter :param custom_virtual_network_id: The ID of a Virtual Network where this Databricks Cluster should be created. - :type custom_virtual_network_id: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :type custom_virtual_network_id: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter :param custom_public_subnet_name: The name of a Public Subnet within the Virtual Network. - :type custom_public_subnet_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :type custom_public_subnet_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter :param custom_private_subnet_name: The name of the Private Subnet within the Virtual Network. - :type custom_private_subnet_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :type custom_private_subnet_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter :param enable_no_public_ip: Should the Public IP be Disabled?. - :type enable_no_public_ip: ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + :type enable_no_public_ip: + ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter + :param load_balancer_backend_pool_name: Name of the outbound Load Balancer Backend Pool for + Secure Cluster Connectivity (No Public IP). + :type load_balancer_backend_pool_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :param load_balancer_id: Resource URI of Outbound Load balancer for Secure Cluster Connectivity + (No Public IP) workspace. 
+ :type load_balancer_id: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :param nat_gateway_name: Name of the NAT gateway for Secure Cluster Connectivity (No Public IP) + workspace subnets. + :type nat_gateway_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :param public_ip_name: Name of the Public IP for No Public IP workspace with managed vNet. + :type public_ip_name: ~azure_databricks_management_client.models.WorkspaceCustomStringParameter :param prepare_encryption: Prepare the workspace for encryption. Enables the Managed Identity for managed storage account. - :type prepare_encryption: ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + :type prepare_encryption: + ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter :param encryption: Contains the encryption details for Customer-Managed Key (CMK) enabled workspace. - :type encryption: ~azure.mgmt.databricks.models.WorkspaceEncryptionParameter + :type encryption: ~azure_databricks_management_client.models.WorkspaceEncryptionParameter :param require_infrastructure_encryption: A boolean indicating whether or not the DBFS root file system will be enabled with secondary layer of encryption with platform managed keys for data at rest. :type require_infrastructure_encryption: - ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter + :param storage_account_name: Default DBFS storage account name. + :type storage_account_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :param storage_account_sku_name: Storage account SKU name, ex: Standard_GRS, Standard_LRS. + Refer https://aka.ms/storageskus for valid inputs. + :type storage_account_sku_name: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :param vnet_address_prefix: Address prefix for Managed virtual network. Default value for this + input is 10.139. + :type vnet_address_prefix: + ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :ivar resource_tags: Tags applied to resources under Managed resource group. These can be + updated by updating tags at workspace level. 
+ :vartype resource_tags: + ~azure_databricks_management_client.models.WorkspaceCustomObjectParameter """ + _validation = { + 'resource_tags': {'readonly': True}, + } + _attribute_map = { 'aml_workspace_id': {'key': 'amlWorkspaceId', 'type': 'WorkspaceCustomStringParameter'}, 'custom_virtual_network_id': {'key': 'customVirtualNetworkId', 'type': 'WorkspaceCustomStringParameter'}, 'custom_public_subnet_name': {'key': 'customPublicSubnetName', 'type': 'WorkspaceCustomStringParameter'}, 'custom_private_subnet_name': {'key': 'customPrivateSubnetName', 'type': 'WorkspaceCustomStringParameter'}, 'enable_no_public_ip': {'key': 'enableNoPublicIp', 'type': 'WorkspaceCustomBooleanParameter'}, + 'load_balancer_backend_pool_name': {'key': 'loadBalancerBackendPoolName', 'type': 'WorkspaceCustomStringParameter'}, + 'load_balancer_id': {'key': 'loadBalancerId', 'type': 'WorkspaceCustomStringParameter'}, + 'nat_gateway_name': {'key': 'natGatewayName', 'type': 'WorkspaceCustomStringParameter'}, + 'public_ip_name': {'key': 'publicIpName', 'type': 'WorkspaceCustomStringParameter'}, 'prepare_encryption': {'key': 'prepareEncryption', 'type': 'WorkspaceCustomBooleanParameter'}, 'encryption': {'key': 'encryption', 'type': 'WorkspaceEncryptionParameter'}, 'require_infrastructure_encryption': {'key': 'requireInfrastructureEncryption', 'type': 'WorkspaceCustomBooleanParameter'}, + 'storage_account_name': {'key': 'storageAccountName', 'type': 'WorkspaceCustomStringParameter'}, + 'storage_account_sku_name': {'key': 'storageAccountSkuName', 'type': 'WorkspaceCustomStringParameter'}, + 'vnet_address_prefix': {'key': 'vnetAddressPrefix', 'type': 'WorkspaceCustomStringParameter'}, + 'resource_tags': {'key': 'resourceTags', 'type': 'WorkspaceCustomObjectParameter'}, } def __init__( @@ -857,9 +1362,16 @@ def __init__( custom_public_subnet_name: Optional["WorkspaceCustomStringParameter"] = None, custom_private_subnet_name: Optional["WorkspaceCustomStringParameter"] = None, enable_no_public_ip: Optional["WorkspaceCustomBooleanParameter"] = None, + load_balancer_backend_pool_name: Optional["WorkspaceCustomStringParameter"] = None, + load_balancer_id: Optional["WorkspaceCustomStringParameter"] = None, + nat_gateway_name: Optional["WorkspaceCustomStringParameter"] = None, + public_ip_name: Optional["WorkspaceCustomStringParameter"] = None, prepare_encryption: Optional["WorkspaceCustomBooleanParameter"] = None, encryption: Optional["WorkspaceEncryptionParameter"] = None, require_infrastructure_encryption: Optional["WorkspaceCustomBooleanParameter"] = None, + storage_account_name: Optional["WorkspaceCustomStringParameter"] = None, + storage_account_sku_name: Optional["WorkspaceCustomStringParameter"] = None, + vnet_address_prefix: Optional["WorkspaceCustomStringParameter"] = None, **kwargs ): super(WorkspaceCustomParameters, self).__init__(**kwargs) @@ -868,9 +1380,17 @@ def __init__( self.custom_public_subnet_name = custom_public_subnet_name self.custom_private_subnet_name = custom_private_subnet_name self.enable_no_public_ip = enable_no_public_ip + self.load_balancer_backend_pool_name = load_balancer_backend_pool_name + self.load_balancer_id = load_balancer_id + self.nat_gateway_name = nat_gateway_name + self.public_ip_name = public_ip_name self.prepare_encryption = prepare_encryption self.encryption = encryption self.require_infrastructure_encryption = require_infrastructure_encryption + self.storage_account_name = storage_account_name + self.storage_account_sku_name = storage_account_sku_name + self.vnet_address_prefix = 
vnet_address_prefix + self.resource_tags = None class WorkspaceCustomStringParameter(msrest.serialization.Model): @@ -882,7 +1402,7 @@ class WorkspaceCustomStringParameter(msrest.serialization.Model): :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", "String". - :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType :param value: Required. The value which should be used for this field. :type value: str """ @@ -915,9 +1435,9 @@ class WorkspaceEncryptionParameter(msrest.serialization.Model): :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", "String". - :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType :param value: The value which should be used for this field. - :type value: ~azure.mgmt.databricks.models.Encryption + :type value: ~azure_databricks_management_client.models.Encryption """ _validation = { @@ -944,7 +1464,7 @@ class WorkspaceListResult(msrest.serialization.Model): """List of workspaces. :param value: The array of workspaces. - :type value: list[~azure.mgmt.databricks.models.Workspace] + :type value: list[~azure_databricks_management_client.models.Workspace] :param next_link: The URL to use for getting the next set of results. :type next_link: str """ @@ -966,6 +1486,33 @@ def __init__( self.next_link = next_link +class WorkspacePropertiesEncryption(msrest.serialization.Model): + """Encryption properties for databricks workspace. + + All required parameters must be populated in order to send to Azure. + + :param entities: Required. Encryption entities definition for the workspace. + :type entities: ~azure_databricks_management_client.models.EncryptionEntitiesDefinition + """ + + _validation = { + 'entities': {'required': True}, + } + + _attribute_map = { + 'entities': {'key': 'entities', 'type': 'EncryptionEntitiesDefinition'}, + } + + def __init__( + self, + *, + entities: "EncryptionEntitiesDefinition", + **kwargs + ): + super(WorkspacePropertiesEncryption, self).__init__(**kwargs) + self.entities = entities + + class WorkspaceProviderAuthorization(msrest.serialization.Model): """The workspace provider authorization. 
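
Usage sketch (not part of the generated diff): the snippet below exercises the new model surface added in _models_py3.py above, in particular the WorkspacePropertiesEncryption -> EncryptionEntitiesDefinition -> EncryptionV2 -> EncryptionV2KeyVaultProperties chain and the new Workspace properties (encryption, public_network_access, required_nsg_rules), plus the PrivateEndpointConnection models. It assumes the track-2 client exposed by this package together with azure.identity's DefaultAzureCredential, and that WorkspacesOperations still exposes begin_create_or_update as in earlier versions; subscription, resource group, key vault, and workspace values are placeholders.

# Sketch only, under the assumptions stated above; not generated code.
from azure.identity import DefaultAzureCredential  # assumption: credential type not part of this diff
from azure.mgmt.databricks import AzureDatabricksManagementClient
from azure.mgmt.databricks.models import (
    EncryptionEntitiesDefinition,
    EncryptionV2,
    EncryptionV2KeyVaultProperties,
    PrivateEndpointConnection,
    PrivateEndpointConnectionProperties,
    PrivateLinkServiceConnectionState,
    Sku,
    Workspace,
    WorkspacePropertiesEncryption,
)

# Placeholder identifiers; replace with real values.
SUBSCRIPTION_ID = "<subscription-id>"
RESOURCE_GROUP = "<resource-group>"
WORKSPACE_NAME = "<workspace-name>"

client = AzureDatabricksManagementClient(DefaultAzureCredential(), SUBSCRIPTION_ID)

# Customer-managed key encryption for managed services, composed through the new
# WorkspacePropertiesEncryption -> EncryptionEntitiesDefinition -> EncryptionV2 chain.
encryption = WorkspacePropertiesEncryption(
    entities=EncryptionEntitiesDefinition(
        managed_services=EncryptionV2(
            key_source="Microsoft.Keyvault",
            key_vault_properties=EncryptionV2KeyVaultProperties(
                key_vault_uri="https://<vault-name>.vault.azure.net/",
                key_name="<key-name>",
                key_version="<key-version>",
            ),
        )
    )
)

workspace = Workspace(
    location="westus",
    sku=Sku(name="premium"),
    managed_resource_group_id="/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/<managed-rg>",
    encryption=encryption,
    public_network_access="Disabled",             # new property in 2021-04-01-preview
    required_nsg_rules="NoAzureDatabricksRules",  # new property in 2021-04-01-preview
)

# Assumption: WorkspacesOperations keeps its begin_create_or_update long-running
# operation from earlier versions of this package.
created = client.workspaces.begin_create_or_update(RESOURCE_GROUP, WORKSPACE_NAME, workspace).result()
print(created.provisioning_state, created.workspace_url)

# Approve pending private endpoint connections on the workspace using the new
# PrivateEndpointConnectionsOperations group added later in this diff.
for connection in client.private_endpoint_connections.list(RESOURCE_GROUP, WORKSPACE_NAME):
    approval = PrivateEndpointConnection(
        properties=PrivateEndpointConnectionProperties(
            private_link_service_connection_state=PrivateLinkServiceConnectionState(
                status="Approved",
                description="Approved by the workspace administrator.",
            )
        )
    )
    client.private_endpoint_connections.begin_create(
        RESOURCE_GROUP, WORKSPACE_NAME, connection.name, approval
    ).result()
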
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py index 7688e37c6383..6fc97620650a 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py @@ -7,11 +7,15 @@ # -------------------------------------------------------------------------- from ._workspaces_operations import WorkspacesOperations -from ._vnet_peering_operations import VNetPeeringOperations from ._operations import Operations +from ._private_link_resources_operations import PrivateLinkResourcesOperations +from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations +from ._vnet_peering_operations import VNetPeeringOperations __all__ = [ 'WorkspacesOperations', - 'VNetPeeringOperations', 'Operations', + 'PrivateLinkResourcesOperations', + 'PrivateEndpointConnectionsOperations', + 'VNetPeeringOperations', ] diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py index 19aa0562dc18..311dc36ebf05 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py @@ -30,7 +30,7 @@ class Operations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databricks.models + :type models: ~azure_databricks_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
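
For context, a minimal sketch of consuming the Operations group whose hunks continue below; it assumes the same client construction as in the previous sketch and simply enumerates the Microsoft.Databricks provider operations returned by list().

# Sketch only: iterate the paged OperationListResult exposed by Operations.list().
# Assumes `client` was constructed as in the previous sketch.
for operation in client.operations.list():
    print(operation.name)
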
@@ -54,7 +54,7 @@ def list( :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either OperationListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.OperationListResult] + :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.OperationListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] @@ -62,7 +62,7 @@ def list( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -98,7 +98,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..9d91ad76eb51 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py @@ -0,0 +1,452 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionsOperations(object): + """PrivateEndpointConnectionsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_databricks_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. 
+ :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.PrivateEndpointConnectionsList"] + """List private endpoint connections. + + List private endpoint connections of the workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateEndpointConnectionsList or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.PrivateEndpointConnectionsList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionsList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnectionsList', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.PrivateEndpointConnection" + """Get private endpoint connection. + + Get a private endpoint connection properties for a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection. + :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure_databricks_management_client.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def _create_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + private_endpoint_connection_name, # type: str + private_endpoint_connection, # type: "_models.PrivateEndpointConnection" + **kwargs # 
type: Any + ): + # type: (...) -> "_models.PrivateEndpointConnection" + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._create_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(private_endpoint_connection, 'PrivateEndpointConnection') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if response.status_code == 202: + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def begin_create( + self, + resource_group_name, # type: str + workspace_name, # type: str + private_endpoint_connection_name, # type: str + private_endpoint_connection, # type: "_models.PrivateEndpointConnection" + **kwargs # type: Any + ): + # type: (...) -> LROPoller["_models.PrivateEndpointConnection"] + """Update private endpoint connection status. + + Update the status of a private endpoint connection with the specified name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection. 
+ :type private_endpoint_connection_name: str + :param private_endpoint_connection: The private endpoint connection with updated properties. + :type private_endpoint_connection: ~azure_databricks_management_client.models.PrivateEndpointConnection + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure_databricks_management_client.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + private_endpoint_connection=private_endpoint_connection, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + accept = "application/json" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Remove private endpoint connection. + + Remove private endpoint connection with the specified name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection. + :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py new file mode 100644 index 000000000000..15d242800838 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py @@ -0,0 +1,190 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourcesOperations(object): + """PrivateLinkResourcesOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_databricks_management_client.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.PrivateLinkResourcesList"] + """List private link resources. + + List private link resources for a given workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PrivateLinkResourcesList or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.PrivateLinkResourcesList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourcesList"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PrivateLinkResourcesList', pipeline_response) + list_of_elem = deserialized.value + if cls: + 
list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + group_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.GroupIdInformation" + """Get the specified private link resource. + + Get the specified private link resource for the given group id (sub-resource). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + :type resource_group_name: str + :param workspace_name: The name of the workspace. + :type workspace_name: str + :param group_id: The name of the private link resource. + :type group_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: GroupIdInformation, or the result of cls(response) + :rtype: ~azure_databricks_management_client.models.GroupIdInformation + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.GroupIdInformation"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-04-01-preview" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'groupId': self._serialize.url("group_id", group_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('GroupIdInformation', pipeline_response) + + if cls: + 
return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources/{groupId}'} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py index c80b1a69ad44..ec4fb642db5d 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py @@ -32,7 +32,7 @@ class VNetPeeringOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databricks.models + :type models: ~azure_databricks_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -65,7 +65,7 @@ def get( :type peering_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: VirtualNetworkPeering, or the result of cls(response) - :rtype: ~azure.mgmt.databricks.models.VirtualNetworkPeering or None + :rtype: ~azure_databricks_management_client.models.VirtualNetworkPeering or None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VirtualNetworkPeering"]] @@ -100,7 +100,7 @@ def get( if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -153,7 +153,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -179,8 +179,8 @@ def begin_delete( :type peering_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -277,7 +277,7 @@ def _create_or_update_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -311,15 +311,15 @@ def begin_create_or_update( :type peering_name: str :param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet Peering. - :type virtual_network_peering_parameters: ~azure.mgmt.databricks.models.VirtualNetworkPeering + :type virtual_network_peering_parameters: ~azure_databricks_management_client.models.VirtualNetworkPeering :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :rtype: ~azure.core.polling.LROPoller[~azure_databricks_management_client.models.VirtualNetworkPeering] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] @@ -385,7 +385,7 @@ def list_by_workspace( :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either VirtualNetworkPeeringList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.VirtualNetworkPeeringList] + :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.VirtualNetworkPeeringList] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeeringList"] @@ -435,7 +435,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py index bcfd38c1fff1..dbfe20ee9e9c 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py @@ -32,7 +32,7 @@ class 
WorkspacesOperations(object): instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.databricks.models + :type models: ~azure_databricks_management_client.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. @@ -62,7 +62,7 @@ def get( :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: Workspace, or the result of cls(response) - :rtype: ~azure.mgmt.databricks.models.Workspace + :rtype: ~azure_databricks_management_client.models.Workspace :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] @@ -70,7 +70,7 @@ def get( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" accept = "application/json" # Construct URL @@ -96,7 +96,7 @@ def get( if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = self._deserialize('Workspace', pipeline_response) @@ -119,7 +119,7 @@ def _delete_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" accept = "application/json" # Construct URL @@ -145,7 +145,7 @@ def _delete_initial( if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: @@ -168,8 +168,8 @@ def begin_delete( :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) @@ -231,7 +231,7 @@ def _create_or_update_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -262,7 +262,7 @@ def _create_or_update_initial( if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: @@ -292,15 +292,15 @@ def begin_create_or_update( :param workspace_name: The name of the workspace. :type workspace_name: str :param parameters: Parameters supplied to the create or update a workspace. - :type parameters: ~azure.mgmt.databricks.models.Workspace + :type parameters: ~azure_databricks_management_client.models.Workspace :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace] + :rtype: ~azure.core.polling.LROPoller[~azure_databricks_management_client.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] @@ -362,7 +362,7 @@ def _update_initial( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" @@ -393,7 +393,7 @@ def _update_initial( if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None @@ -421,15 +421,15 @@ def begin_update( :param workspace_name: The name of the workspace. :type workspace_name: str :param parameters: The update to the workspace. - :type parameters: ~azure.mgmt.databricks.models.WorkspaceUpdate + :type parameters: ~azure_databricks_management_client.models.WorkspaceUpdate :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
- :keyword polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy + :keyword polling: By default, your polling method will be ARMPolling. + Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. :return: An instance of LROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace] + :rtype: ~azure.core.polling.LROPoller[~azure_databricks_management_client.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] @@ -490,7 +490,7 @@ def list_by_resource_group( :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.WorkspaceListResult] + :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.WorkspaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] @@ -498,7 +498,7 @@ def list_by_resource_group( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -539,7 +539,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) @@ -559,7 +559,7 @@ def list_by_subscription( :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.WorkspaceListResult] + :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.WorkspaceListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] @@ -567,7 +567,7 @@ def list_by_subscription( 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" + api_version = "2021-04-01-preview" accept = "application/json" def prepare_request(next_link=None): @@ -607,7 +607,7 @@ def get_next(next_link=None): response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
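For reviewers, a minimal usage sketch of the operation groups touched by this regeneration. This is not part of the generated code: it assumes azure-identity is available for DefaultAzureCredential, that the regenerated client is exposed as AzureDatabricksManagementClient with private_endpoint_connections and private_link_resources attributes, and all resource names below are placeholders.

# Hypothetical reviewer sketch -- not generated code; resource names are placeholders.
from azure.identity import DefaultAzureCredential  # assumed available
from azure.mgmt.databricks import AzureDatabricksManagementClient

credential = DefaultAzureCredential()
client = AzureDatabricksManagementClient(credential, subscription_id="<subscription-id>")

# list() returns an ItemPaged iterator; following next_link is handled transparently.
for connection in client.private_endpoint_connections.list("<resource-group>", "<workspace>"):
    print(connection.name)

# Private link resources (group ids) exposed by the workspace; get() takes the group id.
for resource in client.private_link_resources.list("<resource-group>", "<workspace>"):
    print(resource.name)

# begin_create/begin_delete return LROPoller objects; .result() blocks until the
# ARM long-running operation completes (ARMPolling by default, NoPolling if polling=False).
poller = client.private_endpoint_connections.begin_delete(
    "<resource-group>", "<workspace>", "<private-endpoint-connection-name>"
)
poller.result()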