diff --git a/src/machinelearningservices/HISTORY.rst b/src/machinelearningservices/HISTORY.rst
new file mode 100644
index 00000000000..1c139576ba0
--- /dev/null
+++ b/src/machinelearningservices/HISTORY.rst
@@ -0,0 +1,8 @@
+.. :changelog:
+
+Release History
+===============
+
+0.1.0
+++++++
+* Initial release.
diff --git a/src/machinelearningservices/README.md b/src/machinelearningservices/README.md
new file mode 100644
index 00000000000..ee9d0842110
--- /dev/null
+++ b/src/machinelearningservices/README.md
@@ -0,0 +1,255 @@
+# Azure CLI machinelearningservices Extension #
+This is the Azure CLI extension for Azure Machine Learning workspaces and related resources (the `Microsoft.MachineLearningServices` resource provider).
+
+### How to use ###
+Install this extension using the CLI command below:
+```
+az extension add --name machinelearningservices
+```
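+
+To check whether the extension is already installed, or to update or remove it later, the standard `az extension` commands should work as usual:
+```
+az extension list --output table
+az extension update --name machinelearningservices
+az extension remove --name machinelearningservices
+```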
+
+### Included Features ###
+#### machinelearningservices workspace ####
+##### Create #####
+```
+az machinelearningservices workspace create \
+ --identity type="SystemAssigned,UserAssigned" userAssignedIdentities={"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/testuai":{}} \
+ --location "eastus2euap" --description "test description" \
+ --application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/microsoft.insights/components/testinsights" \
+ --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" \
+ --identity user-assigned-identity="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/testuai" \
+ --key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" \
+ --status "Enabled" --friendly-name "HelloName" --hbi-workspace false \
+ --key-vault "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" \
+ --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" \
+ --storage-account "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storageAccounts/testStorageAccount" \
+ --resource-group "workspace-1234" --name "testworkspace"
+
+az machinelearningservices workspace wait --created --resource-group "{rg}" --name "{myWorkspace}"
+```
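+
+Once the create call (or `workspace wait --created`) completes, a single field can be pulled from the workspace with a JMESPath `--query`; the `provisioningState` property name below is an assumption about the response shape and may differ in the actual output:
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace" \
+ --query "provisioningState" --output tsv
+```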
+##### Show #####
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices workspace list --resource-group "workspace-1234"
+```
+##### Update #####
+```
+az machinelearningservices workspace update --description "new description" --friendly-name "New friendly name" \
+ --public-network-access "Disabled" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Diagnose #####
+```
+az machinelearningservices workspace diagnose --application-insights "{}" --container-registry "{}" \
+ --dns-resolution "{}" --key-vault "{}" --nsg "{}" --others "{}" --resource-lock "{}" --storage-account "{}" \
+ --udr "{}" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### List-key #####
+```
+az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+```
+##### List-notebook-access-token #####
+```
+az machinelearningservices workspace list-notebook-access-token --resource-group "workspace-1234" \
+ --name "testworkspace"
+```
+##### List-notebook-key #####
+```
+az machinelearningservices workspace list-notebook-key --resource-group "testrg123" --name "workspaces123"
+```
+##### List-outbound-network-dependency-endpoint #####
+```
+az machinelearningservices workspace list-outbound-network-dependency-endpoint --resource-group "workspace-1234" \
+ --name "testworkspace"
+```
+##### List-storage-account-key #####
+```
+az machinelearningservices workspace list-storage-account-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Prepare-notebook #####
+```
+az machinelearningservices workspace prepare-notebook --resource-group "testrg123" --name "workspaces123"
+```
+##### Resync-key #####
+```
+az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+```
+#### machinelearningservices usage ####
+##### List #####
+```
+az machinelearningservices usage list --location "eastus"
+```
+#### machinelearningservices virtual-machine-size ####
+##### List #####
+```
+az machinelearningservices virtual-machine-size list --location "eastus"
+```
+#### machinelearningservices quota ####
+##### List #####
+```
+az machinelearningservices quota list --location "eastus"
+```
+##### Update #####
+```
+az machinelearningservices quota update --location "eastus" \
+ --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 unit="Count" \
+ --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=200 unit="Count"
+```
+#### machinelearningservices compute ####
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"description\\":\\"some compute\\",\\"computeType\\":\\"Kubernetes\\",\\"properties\\":{\\"defaultInstanceType\\":\\"defaultInstanceType\\",\\"instanceTypes\\":{\\"defaultInstanceType\\":{\\"nodeSelector\\":null,\\"resources\\":{\\"limits\\":{\\"cpu\\":\\"1\\",\\"memory\\":\\"4Gi\\",\\"nvidia.com/gpu\\":null},\\"requests\\":{\\"cpu\\":\\"1\\",\\"memory\\":\\"4Gi\\",\\"nvidia.com/gpu\\":null}}}},\\"namespace\\":\\"default\\"},\\"resourceId\\":\\"/subscriptions/34adfa4f-cedf-4dc0-ba29-b6d1a69ab345/resourcegroups/testrg123/providers/Microsoft.ContainerService/managedClusters/compute123-56826-c9b00420020b2\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
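+
+For readability, the double-escaped `--properties` value above decodes to plain JSON; in a bash-like shell the same request can usually be written with single quotes instead (quoting rules differ on PowerShell and cmd, so treat this as a sketch):
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties '{"description":"some compute","computeType":"Kubernetes","properties":{"defaultInstanceType":"defaultInstanceType","instanceTypes":{"defaultInstanceType":{"nodeSelector":null,"resources":{"limits":{"cpu":"1","memory":"4Gi","nvidia.com/gpu":null},"requests":{"cpu":"1","memory":"4Gi","nvidia.com/gpu":null}}}},"namespace":"default"},"resourceId":"/subscriptions/34adfa4f-cedf-4dc0-ba29-b6d1a69ab345/resourcegroups/testrg123/providers/Microsoft.ContainerService/managedClusters/compute123-56826-c9b00420020b2"}' \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```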
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"AmlCompute\\",\\"properties\\":{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"DataFactory\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"AKS\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Create #####
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" \
+ --properties "{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### List #####
+```
+az machinelearningservices compute list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Update #####
+```
+az machinelearningservices compute update --name "compute123" \
+ --scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### List-key #####
+```
+az machinelearningservices compute list-key --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### List-node #####
+```
+az machinelearningservices compute list-node --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Restart #####
+```
+az machinelearningservices compute restart --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Start #####
+```
+az machinelearningservices compute start --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Stop #####
+```
+az machinelearningservices compute stop --name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices compute delete --name "compute123" --resource-group "testrg123" \
+ --underlying-resource-action "Delete" --workspace-name "workspaces123"
+```
+#### machinelearningservices private-endpoint-connection ####
+##### Create #####
+```
+az machinelearningservices private-endpoint-connection create --name "{privateEndpointConnectionName}" \
+ --private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+ --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \
+ --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices private-endpoint-connection list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+ --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices private-link-resource ####
+##### List #####
+```
+az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices workspace-connection ####
+##### Create #####
+```
+az machinelearningservices workspace-connection create --connection-name "connection-1" --auth-type "PAT" \
+ --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group "resourceGroup-1" \
+ --workspace-name "workspace-1"
+```
+##### Show #####
+```
+az machinelearningservices workspace-connection show --connection-name "connection-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### List #####
+```
+az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" \
+ --target "www.facebook.com" --workspace-name "workspace-1"
+```
+##### Delete #####
+```
+az machinelearningservices workspace-connection delete --connection-name "connection-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+#### machinelearningservices workspace-feature ####
+##### List #####
+```
+az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+```
+#### machinelearningservices workspace-sku ####
+##### List #####
+```
+az machinelearningservices workspace-sku list
+```
\ No newline at end of file
diff --git a/src/machinelearningservices/azext_machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/__init__.py
new file mode 100644
index 00000000000..b234b2a3aa6
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/__init__.py
@@ -0,0 +1,50 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from azure.cli.core import AzCommandsLoader
+from azext_machinelearningservices.generated._help import helps # pylint: disable=unused-import
+try:
+ from azext_machinelearningservices.manual._help import helps # pylint: disable=reimported
+except ImportError:
+ pass
+
+
+class AzureMachineLearningWorkspacesCommandsLoader(AzCommandsLoader):
+
+ def __init__(self, cli_ctx=None):
+ from azure.cli.core.commands import CliCommandType
+ from azext_machinelearningservices.generated._client_factory import cf_machinelearningservices_cl
+ machinelearningservices_custom = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.custom#{}',
+ client_factory=cf_machinelearningservices_cl)
+ parent = super(AzureMachineLearningWorkspacesCommandsLoader, self)
+ parent.__init__(cli_ctx=cli_ctx, custom_command_type=machinelearningservices_custom)
+
+ def load_command_table(self, args):
+ from azext_machinelearningservices.generated.commands import load_command_table
+ load_command_table(self, args)
+ try:
+ from azext_machinelearningservices.manual.commands import load_command_table as load_command_table_manual
+ load_command_table_manual(self, args)
+ except ImportError:
+ pass
+ return self.command_table
+
+ def load_arguments(self, command):
+ from azext_machinelearningservices.generated._params import load_arguments
+ load_arguments(self, command)
+ try:
+ from azext_machinelearningservices.manual._params import load_arguments as load_arguments_manual
+ load_arguments_manual(self, command)
+ except ImportError:
+ pass
+
+
+COMMAND_LOADER_CLS = AzureMachineLearningWorkspacesCommandsLoader
diff --git a/src/machinelearningservices/azext_machinelearningservices/action.py b/src/machinelearningservices/azext_machinelearningservices/action.py
new file mode 100644
index 00000000000..d95d53bf711
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/action.py
@@ -0,0 +1,17 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.action import * # noqa: F403
+try:
+ from .manual.action import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json
new file mode 100644
index 00000000000..cfc30c747c7
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json
@@ -0,0 +1,4 @@
+{
+ "azext.isExperimental": true,
+ "azext.minCliCoreVersion": "2.15.0"
+}
\ No newline at end of file
diff --git a/src/machinelearningservices/azext_machinelearningservices/custom.py b/src/machinelearningservices/azext_machinelearningservices/custom.py
new file mode 100644
index 00000000000..dbe9d5f9742
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/custom.py
@@ -0,0 +1,17 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.custom import * # noqa: F403
+try:
+ from .manual.custom import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py
new file mode 100644
index 00000000000..1316b4e5390
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py
@@ -0,0 +1,56 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+def cf_machinelearningservices_cl(cli_ctx, *_):
+ from azure.cli.core.commands.client_factory import get_mgmt_service_client
+ from azext_machinelearningservices.vendored_sdks.machinelearningservices import AzureMachineLearningWorkspaces
+ return get_mgmt_service_client(cli_ctx,
+ AzureMachineLearningWorkspaces)
+
+
+def cf_workspace(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspaces
+
+
+def cf_usage(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).usages
+
+
+def cf_virtual_machine_size(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).virtual_machine_sizes
+
+
+def cf_quota(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).quotas
+
+
+def cf_compute(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).compute
+
+
+def cf_private_endpoint_connection(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).private_endpoint_connections
+
+
+def cf_private_link_resource(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).private_link_resources
+
+
+def cf_workspace_connection(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_connections
+
+
+def cf_workspace_feature(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_features
+
+
+def cf_workspace_sku(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_skus
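+
+
+# The factories above are wired into command groups in generated/commands.py; a rough sketch of
+# that wiring (command and handler names here are illustrative, not the exact generated ones):
+#
+#   with self.command_group('machinelearningservices workspace', client_factory=cf_workspace) as g:
+#       g.custom_command('create', 'machinelearningservices_workspace_create', supports_no_wait=True)
+#       g.custom_show_command('show', 'machinelearningservices_workspace_show')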
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_help.py b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py
new file mode 100644
index 00000000000..57cd72fa2af
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py
@@ -0,0 +1,665 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+
+from knack.help_files import helps
+
+
+helps['machinelearningservices workspace'] = """
+ type: group
+ short-summary: Manage workspace with machinelearningservices
+"""
+
+helps['machinelearningservices workspace list'] = """
+ type: command
+ short-summary: "Lists all the available machine learning workspaces under the specified resource group. And Lists \
+all the available machine learning workspaces under the specified subscription."
+ examples:
+ - name: Get Workspaces by Resource Group
+ text: |-
+ az machinelearningservices workspace list --resource-group "workspace-1234"
+ - name: Get Workspaces by subscription
+ text: |-
+ az machinelearningservices workspace list
+"""
+
+helps['machinelearningservices workspace show'] = """
+ type: command
+ short-summary: "Gets the properties of the specified machine learning workspace."
+ examples:
+ - name: Get Workspace
+ text: |-
+ az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace create'] = """
+ type: command
+ short-summary: "Create a workspace with the specified parameters."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --shared-private-link-resources
+ short-summary: "The list of shared private link resources in this workspace."
+ long-summary: |
+ Usage: --shared-private-link-resources name=XX private-link-resource-id=XX group-id=XX request-message=XX \
+status=XX
+
+ name: Unique name of the private link.
+ private-link-resource-id: The resource id that private link links to.
+ group-id: The private link resource group id.
+ request-message: Request message.
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+
+ Multiple actions can be specified by using more than one --shared-private-link-resources argument.
+ - name: --identity
+ short-summary: "The identity that will be used to access the key vault for encryption at rest."
+ long-summary: |
+ Usage: --identity user-assigned-identity=XX
+
+ user-assigned-identity: The ArmId of the user assigned identity that will be used to access the customer \
+managed key vault
+ - name: --key-vault-properties
+ short-summary: "Customer Key vault properties."
+ long-summary: |
+ Usage: --key-vault-properties key-vault-arm-id=XX key-identifier=XX identity-client-id=XX
+
+ key-vault-arm-id: Required. The ArmId of the keyVault where the customer owned encryption key is present.
+ key-identifier: Required. Key vault uri to access the encryption key.
+ identity-client-id: For future use - The client id of the identity which will be used to access key vault.
+ examples:
+ - name: Create Workspace
+ text: |-
+ az machinelearningservices workspace create --identity type="SystemAssigned,UserAssigned" \
+userAssignedIdentities={"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mi\
+crosoft.ManagedIdentity/userAssignedIdentities/testuai":{}} --location "eastus2euap" --description "test description" \
+--application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/mic\
+rosoft.insights/components/testinsights" --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/reso\
+urceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" --identity \
+user-assigned-identity="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mic\
+rosoft.ManagedIdentity/userAssignedIdentities/testuai" --key-vault-properties identity-client-id="" \
+key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" \
+key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft\
+.KeyVault/vaults/testkv" --status "Enabled" --friendly-name "HelloName" --hbi-workspace false --key-vault \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/\
+testkv" --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-22\
+22-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/priva\
+teLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" --storage-account \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storag\
+eAccounts/testStorageAccount" --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace update'] = """
+ type: command
+ short-summary: "Updates a machine learning workspace with the specified parameters."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Update Workspace
+ text: |-
+ az machinelearningservices workspace update --description "new description" --friendly-name "New \
+friendly name" --public-network-access "Disabled" --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace delete'] = """
+ type: command
+ short-summary: "Deletes a machine learning workspace."
+ examples:
+ - name: Delete Workspace
+ text: |-
+ az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace diagnose'] = """
+ type: command
+ short-summary: "Diagnose workspace setup issue."
+ examples:
+ - name: Diagnose Workspace
+ text: |-
+ az machinelearningservices workspace diagnose --application-insights "{}" --container-registry "{}" \
+--dns-resolution "{}" --key-vault "{}" --nsg "{}" --others "{}" --resource-lock "{}" --storage-account "{}" --udr "{}" \
+--resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace list-key'] = """
+ type: command
+ short-summary: "Lists all the keys associated with this workspace. This includes keys for the storage account, app \
+insights and password for container registry."
+ examples:
+ - name: List Workspace Keys
+ text: |-
+ az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+"""
+
+helps['machinelearningservices workspace list-notebook-access-token'] = """
+ type: command
+ short-summary: "return notebook access token and refresh token."
+ examples:
+ - name: List Notebook Access Token
+ text: |-
+ az machinelearningservices workspace list-notebook-access-token --resource-group "workspace-1234" \
+--name "testworkspace"
+"""
+
+helps['machinelearningservices workspace list-notebook-key'] = """
+ type: command
+ short-summary: "List keys of a notebook."
+ examples:
+ - name: List Notebook Keys
+ text: |-
+ az machinelearningservices workspace list-notebook-key --resource-group "testrg123" --name \
+"workspaces123"
+"""
+
+helps['machinelearningservices workspace list-outbound-network-dependency-endpoint'] = """
+ type: command
+ short-summary: "Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) \
+programmatically."
+ examples:
+ - name: ListOutboundNetworkDependenciesEndpoints
+ text: |-
+ az machinelearningservices workspace list-outbound-network-dependency-endpoint --resource-group \
+"workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace list-storage-account-key'] = """
+ type: command
+ short-summary: "List storage account keys of a workspace."
+ examples:
+ - name: List Storage Account Keys
+ text: |-
+ az machinelearningservices workspace list-storage-account-key --resource-group "testrg123" --name \
+"workspaces123"
+"""
+
+helps['machinelearningservices workspace prepare-notebook'] = """
+ type: command
+ short-summary: "Prepare a notebook."
+ examples:
+ - name: Prepare Notebook
+ text: |-
+ az machinelearningservices workspace prepare-notebook --resource-group "testrg123" --name \
+"workspaces123"
+"""
+
+helps['machinelearningservices workspace resync-key'] = """
+ type: command
+ short-summary: "Resync all the keys associated with this workspace. This includes keys for the storage account, \
+app insights and password for container registry."
+ examples:
+ - name: Resync Workspace Keys
+ text: |-
+ az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+"""
+
+helps['machinelearningservices workspace wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices workspace is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully \
+created.
+ text: |-
+ az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" \
+--created
+ - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully \
+deleted.
+ text: |-
+ az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" \
+--deleted
+"""
+
+helps['machinelearningservices usage'] = """
+ type: group
+ short-summary: Manage usage with machinelearningservices
+"""
+
+helps['machinelearningservices usage list'] = """
+ type: command
+ short-summary: "Gets the current usage information as well as limits for AML resources for given subscription and \
+location."
+ examples:
+ - name: List Usages
+ text: |-
+ az machinelearningservices usage list --location "eastus"
+"""
+
+helps['machinelearningservices virtual-machine-size'] = """
+ type: group
+ short-summary: Manage virtual machine size with machinelearningservices
+"""
+
+helps['machinelearningservices virtual-machine-size list'] = """
+ type: command
+ short-summary: "Returns supported VM Sizes in a location."
+ examples:
+ - name: List VM Sizes
+ text: |-
+ az machinelearningservices virtual-machine-size list --location "eastus"
+"""
+
+helps['machinelearningservices quota'] = """
+ type: group
+ short-summary: Manage quota with machinelearningservices
+"""
+
+helps['machinelearningservices quota list'] = """
+ type: command
+ short-summary: "Gets the currently assigned Workspace Quotas based on VMFamily."
+ examples:
+ - name: List workspace quotas by VMFamily
+ text: |-
+ az machinelearningservices quota list --location "eastus"
+"""
+
+helps['machinelearningservices quota update'] = """
+ type: command
+ short-summary: "Update quota for each VM family in workspace."
+ parameters:
+ - name: --value
+ short-summary: "The list for update quota."
+ long-summary: |
+ Usage: --value id=XX type=XX limit=XX unit=XX
+
+ id: Specifies the resource ID.
+ type: Specifies the resource type.
+ limit: The maximum permitted quota of the resource.
+ unit: An enum describing the unit of quota measurement.
+
+ Multiple actions can be specified by using more than one --value argument.
+ examples:
+ - name: update quotas
+ text: |-
+ az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServi\
+ces/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.Ma\
+chineLearningServices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 \
+unit="Count" --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0\
+000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standa\
+rd_DSv2_Family_Cluster_Dedicated_vCPUs" limit=200 unit="Count"
+"""
+
+helps['machinelearningservices compute'] = """
+ type: group
+ short-summary: Manage compute with machinelearningservices
+"""
+
+helps['machinelearningservices compute list'] = """
+ type: command
+ short-summary: "Gets computes in specified workspace."
+ examples:
+ - name: Get Computes
+ text: |-
+ az machinelearningservices compute list --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute show'] = """
+ type: command
+ short-summary: "Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are not \
+returned - use 'keys' nested resource to get them."
+ examples:
+ - name: Get an AKS Compute
+ text: |-
+ az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+ - name: Get an AML Compute
+ text: |-
+ az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+ - name: Get a Kubernetes Compute
+ text: |-
+ az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+ - name: Get a ComputeInstance
+ text: |-
+ az machinelearningservices compute show --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Attach a Kubernetes Compute
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"description\\":\\"some compute\\",\\"computeType\\":\\"Kubernetes\\",\\"properties\\":{\\"defaultInstanceType\\":\
+\\"defaultInstanceType\\",\\"instanceTypes\\":{\\"defaultInstanceType\\":{\\"nodeSelector\\":null,\\"resources\\":{\\"l\
+imits\\":{\\"cpu\\":\\"1\\",\\"memory\\":\\"4Gi\\",\\"nvidia.com/gpu\\":null},\\"requests\\":{\\"cpu\\":\\"1\\",\\"memo\
+ry\\":\\"4Gi\\",\\"nvidia.com/gpu\\":null}}}},\\"namespace\\":\\"default\\"},\\"resourceId\\":\\"/subscriptions/34adfa4\
+f-cedf-4dc0-ba29-b6d1a69ab345/resourcegroups/testrg123/providers/Microsoft.ContainerService/managedClusters/compute123-\
+56826-c9b00420020b2\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create an AML Compute
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"AmlCompute\\",\\"properties\\":{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osT\
+ype\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"\
+minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/0\
+0000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery\
+/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"DataFactory\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create an AKS Compute
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"AKS\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeIns\
+tanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"0\
+0000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\
+\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"vmSize\\":\\"STANDARD_NC6\\"}}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute update'] = """
+ type: command
+ short-summary: "Updates properties of a compute. This call will overwrite a compute if it exists. This is a \
+nonrecoverable operation."
+ parameters:
+ - name: --scale-settings
+ short-summary: "scale settings for AML Compute"
+ long-summary: |
+ Usage: --scale-settings max-node-count=XX min-node-count=XX node-idle-time-before-scale-down=XX
+
+ max-node-count: Required. Max number of nodes to use
+ min-node-count: Min number of nodes to use
+ node-idle-time-before-scale-down: Node Idle Time before scaling down amlCompute. This string needs to be \
+in the RFC Format.
+ examples:
+ - name: Update an AmlCompute Compute
+ text: |-
+ az machinelearningservices compute update --name "compute123" --scale-settings max-node-count=4 \
+min-node-count=4 node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute delete'] = """
+ type: command
+ short-summary: "Deletes specified Machine Learning compute."
+ examples:
+ - name: Delete Compute
+ text: |-
+ az machinelearningservices compute delete --name "compute123" --resource-group "testrg123" \
+--underlying-resource-action "Delete" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute list-key'] = """
+ type: command
+ short-summary: "Gets secrets related to Machine Learning compute (storage keys, service credentials, etc)."
+ examples:
+ - name: List AKS Compute Keys
+ text: |-
+ az machinelearningservices compute list-key --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute list-node'] = """
+ type: command
+ short-summary: "Get the details (e.g IP address, port etc) of all the compute nodes in the compute."
+ examples:
+ - name: Get compute nodes information for a compute
+ text: |-
+ az machinelearningservices compute list-node --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute restart'] = """
+ type: command
+ short-summary: "Posts a restart action to a compute instance."
+ examples:
+ - name: Restart ComputeInstance Compute
+ text: |-
+ az machinelearningservices compute restart --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute start'] = """
+ type: command
+ short-summary: "Posts a start action to a compute instance."
+ examples:
+ - name: Start ComputeInstance Compute
+ text: |-
+ az machinelearningservices compute start --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute stop'] = """
+ type: command
+ short-summary: "Posts a stop action to a compute instance."
+ examples:
+ - name: Stop ComputeInstance Compute
+ text: |-
+ az machinelearningservices compute stop --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices compute wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices compute is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices compute is successfully \
+created.
+ text: |-
+ az machinelearningservices compute wait --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices compute is successfully \
+updated.
+ text: |-
+ az machinelearningservices compute wait --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123" --updated
+ - name: Pause executing next line of CLI script until the machinelearningservices compute is successfully \
+deleted.
+ text: |-
+ az machinelearningservices compute wait --name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123" --deleted
+"""
+
+helps['machinelearningservices private-endpoint-connection'] = """
+ type: group
+ short-summary: Manage private endpoint connection with machinelearningservices
+"""
+
+helps['machinelearningservices private-endpoint-connection list'] = """
+ type: command
+ short-summary: "List all the private endpoint connections associated with the workspace."
+ examples:
+ - name: WorkspaceListPrivateEndpointConnections
+ text: |-
+ az machinelearningservices private-endpoint-connection list --resource-group "rg-1234" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection show'] = """
+ type: command
+ short-summary: "Gets the specified private endpoint connection associated with the workspace."
+ examples:
+ - name: WorkspaceGetPrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection create'] = """
+ type: command
+ short-summary: "Update the state of specified private endpoint connection associated with the workspace."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --private-link-service-connection-state
+ short-summary: "A collection of information about the state of the connection between service consumer and \
+provider."
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+ description: The reason for approval/rejection of the connection.
+ actions-required: A message indicating if changes on the service provider require any updates on the \
+consumer.
+ examples:
+ - name: WorkspacePutPrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection create --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection update'] = """
+ type: command
+ short-summary: "Update the state of specified private endpoint connection associated with the workspace."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --private-link-service-connection-state
+ short-summary: "A collection of information about the state of the connection between service consumer and \
+provider."
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+ description: The reason for approval/rejection of the connection.
+ actions-required: A message indicating if changes on the service provider require any updates on the \
+consumer.
+"""
+
+helps['machinelearningservices private-endpoint-connection delete'] = """
+ type: command
+ short-summary: "Deletes the specified private endpoint connection associated with the workspace."
+ examples:
+ - name: WorkspaceDeletePrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-link-resource'] = """
+ type: group
+ short-summary: Manage private link resource with machinelearningservices
+"""
+
+helps['machinelearningservices private-link-resource list'] = """
+ type: command
+ short-summary: "Gets the private link resources that need to be created for a workspace."
+ examples:
+ - name: WorkspaceListPrivateLinkResources
+ text: |-
+ az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices workspace-connection'] = """
+ type: group
+ short-summary: Manage workspace connection with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-connection list'] = """
+ type: command
+ short-summary: "List all connections under a AML workspace."
+ examples:
+ - name: ListWorkspaceConnections
+ text: |-
+ az machinelearningservices workspace-connection list --category "ACR" --resource-group \
+"resourceGroup-1" --target "www.facebook.com" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection show'] = """
+ type: command
+ short-summary: "Get the detail of a workspace connection."
+ examples:
+ - name: GetWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection create'] = """
+ type: command
+ short-summary: "Add a new workspace connection."
+ examples:
+ - name: CreateWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection create --connection-name "connection-1" --auth-type \
+"PAT" --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group "resourceGroup-1" \
+--workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection delete'] = """
+ type: command
+ short-summary: "Delete a workspace connection."
+ examples:
+ - name: DeleteWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection delete --connection-name "connection-1" \
+--resource-group "resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-feature'] = """
+ type: group
+ short-summary: Manage workspace feature with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-feature list'] = """
+ type: command
+ short-summary: "Lists all enabled features for a workspace."
+ examples:
+ - name: List Workspace features
+ text: |-
+ az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices workspace-sku'] = """
+ type: group
+ short-summary: Manage workspace sku with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-sku list'] = """
+ type: command
+ short-summary: "Lists all skus with associated features."
+ examples:
+ - name: List Skus
+ text: |-
+ az machinelearningservices workspace-sku list
+"""
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_params.py b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py
new file mode 100644
index 00000000000..b1b88686dfe
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py
@@ -0,0 +1,370 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+# pylint: disable=too-many-statements
+
+from azure.cli.core.commands.parameters import (
+ tags_type,
+ get_three_state_flag,
+ get_enum_type,
+ resource_group_name_type,
+ get_location_type
+)
+from azure.cli.core.commands.validators import (
+ get_default_location_from_resource_group,
+ validate_file_or_dict
+)
+from azext_machinelearningservices.action import (
+ AddSku,
+ AddSharedPrivateLinkResources,
+ AddIdentity,
+ AddKeyVaultProperties,
+ AddValue,
+ AddScaleSettings,
+ AddPrivateLinkServiceConnectionState
+)
+
+
+def load_arguments(self, _):
+
+ with self.argument_context('machinelearningservices workspace list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+
+ with self.argument_context('machinelearningservices workspace show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('description', type=str, help='The description of this workspace.')
+ c.argument('friendly_name', type=str, help='The friendly name for this workspace. This name is mutable')
+ c.argument('key_vault', type=str, help='ARM id of the key vault associated with this workspace. This cannot be '
+ 'changed once the workspace has been created')
+ c.argument('application_insights', type=str, help='ARM id of the application insights associated with this '
+ 'workspace. This cannot be changed once the workspace has been created')
+ c.argument('container_registry', type=str, help='ARM id of the container registry associated with this '
+ 'workspace. This cannot be changed once the workspace has been created')
+ c.argument('storage_account', type=str, help='ARM id of the storage account associated with this workspace. '
+ 'This cannot be changed once the workspace has been created')
+ c.argument('discovery_url', type=str, help='Url for the discovery service to identify regional endpoints for '
+ 'machine learning experimentation services')
+ c.argument('hbi_workspace', arg_type=get_three_state_flag(), help='The flag to signal HBI data in the '
+ 'workspace and reduce diagnostic data collected by the service')
+ c.argument('image_build_compute', type=str, help='The compute name for image build')
+ c.argument('allow_public_access_when_behind_vnet', arg_type=get_three_state_flag(), help='The flag to indicate '
+ 'whether to allow public access when behind VNet.')
+ c.argument('public_network_access', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Whether requests '
+ 'from Public Network are allowed.')
+ c.argument('shared_private_link_resources', action=AddSharedPrivateLinkResources, nargs='+', help='The list of '
+ 'shared private link resources in this workspace.')
+ c.argument('primary_user_assigned_identity', type=str, help='The user assigned identity resource id that '
+ 'represents the workspace identity.')
+ c.argument('collections_throughput', type=int, help='The throughput of the collections in cosmosdb database',
+ arg_group='Service Managed Resources Settings Cosmos Db')
+ c.argument('status', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Indicates whether or not the '
+ 'encryption is enabled for the workspace.', arg_group='Encryption')
+ c.argument('identity', action=AddIdentity, nargs='+', help='The identity that will be used to access the key '
+ 'vault for encryption at rest.', arg_group='Encryption')
+ c.argument('key_vault_properties', action=AddKeyVaultProperties, nargs='+', help='Customer Key vault '
+ 'properties.', arg_group='Encryption')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+
+ with self.argument_context('machinelearningservices workspace update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('description', type=str, help='The description of this workspace.')
+ c.argument('friendly_name', type=str, help='The friendly name for this workspace.')
+ c.argument('image_build_compute', type=str, help='The compute name for image build')
+ c.argument('primary_user_assigned_identity', type=str, help='The user assigned identity resource id that '
+ 'represents the workspace identity.')
+ c.argument('public_network_access', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Whether requests '
+ 'from Public Network are allowed.')
+ c.argument('collections_throughput', type=int, help='The throughput of the collections in cosmosdb database',
+ arg_group='Service Managed Resources Settings Cosmos Db')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+
+ with self.argument_context('machinelearningservices workspace delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace diagnose') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+        c.argument('udr', type=validate_file_or_dict, help='Setting for diagnosing user-defined routing. Expected '
+                   'value: json-string/@json-file.', arg_group='Value')
+        c.argument('nsg', type=validate_file_or_dict, help='Setting for diagnosing network security group. Expected '
+                   'value: json-string/@json-file.', arg_group='Value')
+        c.argument('resource_lock', type=validate_file_or_dict, help='Setting for diagnosing resource lock. Expected '
+                   'value: json-string/@json-file.', arg_group='Value')
+        c.argument('dns_resolution', type=validate_file_or_dict, help='Setting for diagnosing DNS resolution. '
+                   'Expected value: json-string/@json-file.', arg_group='Value')
+        c.argument('storage_account', type=validate_file_or_dict, help='Setting for diagnosing dependent storage '
+                   'account. Expected value: json-string/@json-file.', arg_group='Value')
+        c.argument('key_vault', type=validate_file_or_dict, help='Setting for diagnosing dependent key vault. '
+                   'Expected value: json-string/@json-file.', arg_group='Value')
+        c.argument('container_registry', type=validate_file_or_dict, help='Setting for diagnosing dependent '
+                   'container registry. Expected value: json-string/@json-file.', arg_group='Value')
+        c.argument('application_insights', type=validate_file_or_dict, help='Setting for diagnosing dependent '
+                   'Application Insights. Expected value: json-string/@json-file.', arg_group='Value')
+        c.argument('others', type=validate_file_or_dict, help='Setting for diagnosing unclassified categories of '
+                   'problems. Expected value: json-string/@json-file.', arg_group='Value')
+
+ with self.argument_context('machinelearningservices workspace list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace list-notebook-access-token') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace list-notebook-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace list-outbound-network-dependency-endpoint') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace list-storage-account-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace prepare-notebook') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace resync-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices usage list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices virtual-machine-size list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices quota list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices quota update') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name')
+        c.argument('value', action=AddValue, nargs='+', help='The list of quotas to update.')
+ c.argument('quota_update_parameters_location', type=str, help='Region of workspace quota to be updated.',
+ id_part='name')
+
+ with self.argument_context('machinelearningservices compute list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+
+ with self.argument_context('machinelearningservices compute show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices compute create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.')
+        c.argument('properties', type=validate_file_or_dict, help='Compute properties. Expected value: '
+                   'json-string/@json-file.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+
+ with self.argument_context('machinelearningservices compute update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+        c.argument('scale_settings', action=AddScaleSettings, nargs='+', help='Scale settings for AML Compute.',
+                   arg_group='Properties')
+
+ with self.argument_context('machinelearningservices compute delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+ c.argument('underlying_resource_action', arg_type=get_enum_type(['Delete', 'Detach']), help='Delete the '
+ 'underlying compute if \'Delete\', or detach the underlying compute from workspace if \'Detach\'.')
+
+ with self.argument_context('machinelearningservices compute list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.')
+
+ with self.argument_context('machinelearningservices compute list-node') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.')
+
+ with self.argument_context('machinelearningservices compute restart') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices compute start') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices compute stop') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices compute wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', options_list=['--name', '-n', '--compute-name'], type=str, help='Name of the Azure '
+ 'Machine Learning compute.', id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+        c.argument('private_endpoint_connection_name', type=str,
+                   options_list=['--name', '-n', '--private-endpoint-connection-name'], id_part='child_name_1',
+                   help='The name of the private endpoint connection associated with the workspace.')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+        c.argument('private_endpoint_connection_name', type=str,
+                   options_list=['--name', '-n', '--private-endpoint-connection-name'],
+                   help='The name of the private endpoint connection associated with the workspace.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='A collection of information about the state of the connection between service consumer and '
+ 'provider.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+        c.argument('private_endpoint_connection_name', type=str,
+                   options_list=['--name', '-n', '--private-endpoint-connection-name'], id_part='child_name_1',
+                   help='The name of the private endpoint connection associated with the workspace.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='A collection of information about the state of the connection between service consumer and '
+ 'provider.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.ignore('properties')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+        c.argument('private_endpoint_connection_name', type=str,
+                   options_list=['--name', '-n', '--private-endpoint-connection-name'], id_part='child_name_1',
+                   help='The name of the private endpoint connection associated with the workspace.')
+
+ with self.argument_context('machinelearningservices private-link-resource list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace-connection list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('target', type=str, help='Target of the workspace connection.')
+ c.argument('category', type=str, help='Category of the workspace connection.')
+
+ with self.argument_context('machinelearningservices workspace-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+        c.argument('connection_name', type=str, help='Friendly name of the workspace connection.',
+                   id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices workspace-connection create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+        c.argument('connection_name', type=str, help='Friendly name of the workspace connection.')
+ c.argument('category', type=str, help='Category of the workspace connection.')
+ c.argument('target', type=str, help='Target of the workspace connection.')
+ c.argument('auth_type', type=str, help='Authorization type of the workspace connection.')
+ c.argument('value', type=str, help='Value details of the workspace connection.')
+
+ with self.argument_context('machinelearningservices workspace-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+        c.argument('connection_name', type=str, help='Friendly name of the workspace connection.',
+                   id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices workspace-feature list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
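
Note on the helpers used in the registrations above: `get_three_state_flag()` and `get_enum_type(...)` come from azure.cli.core. The sketch below is a hypothetical illustration using plain argparse stand-ins (not the azure.cli.core implementations) of the intended behavior: a three-state flag accepts true/false-style values such as `--hbi-workspace false`, while enum-typed options such as `--public-network-access` are restricted to a fixed choice list.

```
# Simplified, hypothetical stand-ins that mimic the intent of get_three_state_flag()
# and get_enum_type(...); the real helpers live in azure.cli.core.
import argparse


def three_state(value):
    # Accept common boolean spellings, mirroring how a three-state flag parses true/false.
    lowered = value.lower()
    if lowered in ('true', 'yes', '1'):
        return True
    if lowered in ('false', 'no', '0'):
        return False
    raise argparse.ArgumentTypeError('expected a boolean-like value, got {!r}'.format(value))


parser = argparse.ArgumentParser()
parser.add_argument('--hbi-workspace', type=three_state)
parser.add_argument('--public-network-access', choices=['Enabled', 'Disabled'])

args = parser.parse_args(['--hbi-workspace', 'false', '--public-network-access', 'Disabled'])
print(args.hbi_workspace, args.public_network_access)  # False Disabled
```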
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py
new file mode 100644
index 00000000000..b33a44c1ebf
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py
@@ -0,0 +1,9 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/action.py b/src/machinelearningservices/azext_machinelearningservices/generated/action.py
new file mode 100644
index 00000000000..7ba1b039abc
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/action.py
@@ -0,0 +1,219 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access
+
+import argparse
+from collections import defaultdict
+from knack.util import CLIError
+
+
+class AddSku(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.sku = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'name':
+ d['name'] = v[0]
+ elif kl == 'tier':
+ d['tier'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter sku. All possible keys are: name, tier'.
+ format(k))
+ return d
+
+
+class AddSharedPrivateLinkResources(argparse._AppendAction):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ super(AddSharedPrivateLinkResources, self).__call__(parser, namespace, action, option_string)
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'name':
+ d['name'] = v[0]
+ elif kl == 'private-link-resource-id':
+ d['private_link_resource_id'] = v[0]
+ elif kl == 'group-id':
+ d['group_id'] = v[0]
+ elif kl == 'request-message':
+ d['request_message'] = v[0]
+ elif kl == 'status':
+ d['status'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter shared_private_link_resources. All '
+ 'possible keys are: name, private-link-resource-id, group-id, request-message, status'.
+ format(k))
+ return d
+
+
+class AddIdentity(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.identity = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'user-assigned-identity':
+ d['user_assigned_identity'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter identity. All possible keys are: '
+ 'user-assigned-identity'.format(k))
+ return d
+
+
+class AddKeyVaultProperties(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.key_vault_properties = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'key-vault-arm-id':
+ d['key_vault_arm_id'] = v[0]
+ elif kl == 'key-identifier':
+ d['key_identifier'] = v[0]
+ elif kl == 'identity-client-id':
+ d['identity_client_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter key_vault_properties. All possible keys '
+ 'are: key-vault-arm-id, key-identifier, identity-client-id'.format(k))
+ return d
+
+
+class AddValue(argparse._AppendAction):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ super(AddValue, self).__call__(parser, namespace, action, option_string)
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'id':
+ d['id'] = v[0]
+ elif kl == 'type':
+ d['type'] = v[0]
+ elif kl == 'limit':
+ d['limit'] = v[0]
+ elif kl == 'unit':
+ d['unit'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter value. All possible keys are: id, type, '
+ 'limit, unit'.format(k))
+ return d
+
+
+class AddScaleSettings(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.scale_settings = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ d['min_node_count'] = 0
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'max-node-count':
+ d['max_node_count'] = v[0]
+ elif kl == 'min-node-count':
+ d['min_node_count'] = v[0]
+ elif kl == 'node-idle-time-before-scale-down':
+ d['node_idle_time_before_scale_down'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter scale_settings. All possible keys are: '
+ 'max-node-count, min-node-count, node-idle-time-before-scale-down'.format(k))
+ return d
+
+
+class AddPrivateLinkServiceConnectionState(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.private_link_service_connection_state = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'status':
+ d['status'] = v[0]
+ elif kl == 'description':
+ d['description'] = v[0]
+ elif kl == 'actions-required':
+ d['actions_required'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter private_link_service_connection_state. '
+ 'All possible keys are: status, description, actions-required'.format(k))
+ return d
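
The action classes above all follow the same KEY=VALUE parsing pattern. The snippet below is a small usage sketch, assuming the extension package (and its knack dependency) is importable in the current Python environment, that wires `AddScaleSettings` into a plain argparse parser to show the dict it produces; values are kept as strings and `min_node_count` defaults to 0.

```
# Usage sketch; assumes azext_machinelearningservices and knack are importable,
# e.g. with the extension installed into the active environment.
import argparse

from azext_machinelearningservices.generated.action import AddScaleSettings

parser = argparse.ArgumentParser()
parser.add_argument('--scale-settings', action=AddScaleSettings, nargs='+')

ns = parser.parse_args([
    '--scale-settings',
    'min-node-count=0', 'max-node-count=4', 'node-idle-time-before-scale-down=PT30M',
])
# All values arrive as strings; an unknown key raises a CLIError listing the supported keys.
print(ns.scale_settings)
# {'min_node_count': '0', 'max_node_count': '4', 'node_idle_time_before_scale_down': 'PT30M'}
```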
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/commands.py b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py
new file mode 100644
index 00000000000..cbd81934597
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py
@@ -0,0 +1,148 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-statements
+# pylint: disable=too-many-locals
+
+from azure.cli.core.commands import CliCommandType
+
+
+def load_command_table(self, _):
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace
+ machinelearningservices_workspace = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspaces_ope'
+ 'rations#WorkspacesOperations.{}',
+ client_factory=cf_workspace)
+ with self.command_group('machinelearningservices workspace', machinelearningservices_workspace,
+ client_factory=cf_workspace) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_list')
+ g.custom_show_command('show', 'machinelearningservices_workspace_show')
+ g.custom_command('create', 'machinelearningservices_workspace_create', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_workspace_update')
+ g.custom_command('delete', 'machinelearningservices_workspace_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_command('diagnose', 'machinelearningservices_workspace_diagnose', supports_no_wait=True)
+ g.custom_command('list-key', 'machinelearningservices_workspace_list_key')
+ g.custom_command('list-notebook-access-token', 'machinelearningservices_workspace_list_notebook_access_token')
+ g.custom_command('list-notebook-key', 'machinelearningservices_workspace_list_notebook_key')
+ g.custom_command('list-outbound-network-dependency-endpoint', 'machinelearningservices_workspace_list_outbound_'
+ 'network_dependency_endpoint')
+ g.custom_command('list-storage-account-key', 'machinelearningservices_workspace_list_storage_account_key')
+ g.custom_command('prepare-notebook', 'machinelearningservices_workspace_prepare_notebook',
+ supports_no_wait=True)
+ g.custom_command('resync-key', 'machinelearningservices_workspace_resync_key', supports_no_wait=True)
+ g.custom_wait_command('wait', 'machinelearningservices_workspace_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_usage
+ machinelearningservices_usage = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._usages_operati'
+ 'ons#UsagesOperations.{}',
+ client_factory=cf_usage)
+ with self.command_group('machinelearningservices usage', machinelearningservices_usage,
+ client_factory=cf_usage) as g:
+ g.custom_command('list', 'machinelearningservices_usage_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_virtual_machine_size
+ machinelearningservices_virtual_machine_size = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._virtual_machin'
+ 'e_sizes_operations#VirtualMachineSizesOperations.{}',
+ client_factory=cf_virtual_machine_size)
+ with self.command_group('machinelearningservices virtual-machine-size',
+ machinelearningservices_virtual_machine_size,
+ client_factory=cf_virtual_machine_size) as g:
+ g.custom_command('list', 'machinelearningservices_virtual_machine_size_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_quota
+ machinelearningservices_quota = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._quotas_operati'
+ 'ons#QuotasOperations.{}',
+ client_factory=cf_quota)
+ with self.command_group('machinelearningservices quota', machinelearningservices_quota,
+ client_factory=cf_quota) as g:
+ g.custom_command('list', 'machinelearningservices_quota_list')
+ g.custom_command('update', 'machinelearningservices_quota_update')
+
+ from azext_machinelearningservices.generated._client_factory import cf_compute
+ machinelearningservices_compute = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._compute_operat'
+ 'ions#ComputeOperations.{}',
+ client_factory=cf_compute)
+ with self.command_group('machinelearningservices compute', machinelearningservices_compute,
+ client_factory=cf_compute) as g:
+ g.custom_command('list', 'machinelearningservices_compute_list')
+ g.custom_show_command('show', 'machinelearningservices_compute_show')
+ g.custom_command('create', 'machinelearningservices_compute_create', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_compute_update', supports_no_wait=True)
+ g.custom_command('delete', 'machinelearningservices_compute_delete', supports_no_wait=True, confirmation=True)
+ g.custom_command('list-key', 'machinelearningservices_compute_list_key')
+ g.custom_command('list-node', 'machinelearningservices_compute_list_node')
+ g.custom_command('restart', 'machinelearningservices_compute_restart', supports_no_wait=True)
+ g.custom_command('start', 'machinelearningservices_compute_start', supports_no_wait=True)
+ g.custom_command('stop', 'machinelearningservices_compute_stop', supports_no_wait=True)
+ g.custom_wait_command('wait', 'machinelearningservices_compute_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_private_endpoint_connection
+ machinelearningservices_private_endpoint_connection = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_endpoi'
+ 'nt_connections_operations#PrivateEndpointConnectionsOperations.{}',
+ client_factory=cf_private_endpoint_connection)
+ with self.command_group('machinelearningservices private-endpoint-connection',
+ machinelearningservices_private_endpoint_connection,
+ client_factory=cf_private_endpoint_connection) as g:
+ g.custom_command('list', 'machinelearningservices_private_endpoint_connection_list')
+ g.custom_show_command('show', 'machinelearningservices_private_endpoint_connection_show')
+ g.custom_command('create', 'machinelearningservices_private_endpoint_connection_create')
+ g.generic_update_command('update', setter_arg_name='properties',
+ custom_func_name='machinelearningservices_private_endpoint_connection_update')
+ g.custom_command('delete', 'machinelearningservices_private_endpoint_connection_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_private_link_resource
+ machinelearningservices_private_link_resource = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_link_r'
+ 'esources_operations#PrivateLinkResourcesOperations.{}',
+ client_factory=cf_private_link_resource)
+ with self.command_group('machinelearningservices private-link-resource',
+ machinelearningservices_private_link_resource,
+ client_factory=cf_private_link_resource) as g:
+ g.custom_command('list', 'machinelearningservices_private_link_resource_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace_connection
+ machinelearningservices_workspace_connection = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_conn'
+ 'ections_operations#WorkspaceConnectionsOperations.{}',
+ client_factory=cf_workspace_connection)
+ with self.command_group('machinelearningservices workspace-connection',
+ machinelearningservices_workspace_connection,
+ client_factory=cf_workspace_connection) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_connection_list')
+ g.custom_show_command('show', 'machinelearningservices_workspace_connection_show')
+ g.custom_command('create', 'machinelearningservices_workspace_connection_create')
+ g.custom_command('delete', 'machinelearningservices_workspace_connection_delete', confirmation=True)
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace_feature
+ machinelearningservices_workspace_feature = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_feat'
+ 'ures_operations#WorkspaceFeaturesOperations.{}',
+ client_factory=cf_workspace_feature)
+ with self.command_group('machinelearningservices workspace-feature', machinelearningservices_workspace_feature,
+ client_factory=cf_workspace_feature) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_feature_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace_sku
+ machinelearningservices_workspace_sku = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_skus'
+ '_operations#WorkspaceSkusOperations.{}',
+ client_factory=cf_workspace_sku)
+ with self.command_group('machinelearningservices workspace-sku', machinelearningservices_workspace_sku,
+ client_factory=cf_workspace_sku) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_sku_list')
+
+ with self.command_group('machinelearningservices', is_experimental=True):
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/custom.py b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py
new file mode 100644
index 00000000000..e0ebfd9af35
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py
@@ -0,0 +1,517 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=line-too-long
+# pylint: disable=too-many-lines
+# pylint: disable=unused-argument
+
+from azure.cli.core.util import sdk_no_wait
+
+
+def machinelearningservices_workspace_list(client,
+ resource_group_name=None,
+ skip=None):
+ if resource_group_name:
+ return client.list_by_resource_group(resource_group_name=resource_group_name,
+ skip=skip)
+ return client.list_by_subscription(skip=skip)
+
+
+def machinelearningservices_workspace_show(client,
+ resource_group_name,
+ workspace_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_create(client,
+ resource_group_name,
+ workspace_name,
+ location=None,
+ tags=None,
+ sku=None,
+ description=None,
+ friendly_name=None,
+ key_vault=None,
+ application_insights=None,
+ container_registry=None,
+ storage_account=None,
+ discovery_url=None,
+ hbi_workspace=None,
+ image_build_compute=None,
+ allow_public_access_when_behind_vnet=None,
+ public_network_access=None,
+ shared_private_link_resources=None,
+ primary_user_assigned_identity=None,
+ collections_throughput=None,
+ status=None,
+ identity=None,
+ key_vault_properties=None,
+ type_=None,
+ user_assigned_identities=None,
+ no_wait=False):
+ if hbi_workspace is None:
+ hbi_workspace = False
+ if allow_public_access_when_behind_vnet is None:
+ allow_public_access_when_behind_vnet = False
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['description'] = description
+ parameters['friendly_name'] = friendly_name
+ parameters['key_vault'] = key_vault
+ parameters['application_insights'] = application_insights
+ parameters['container_registry'] = container_registry
+ parameters['storage_account'] = storage_account
+ parameters['discovery_url'] = discovery_url
+    parameters['hbi_workspace'] = hbi_workspace
+    parameters['image_build_compute'] = image_build_compute
+    parameters['allow_public_access_when_behind_vnet'] = allow_public_access_when_behind_vnet
+ parameters['public_network_access'] = public_network_access
+ parameters['shared_private_link_resources'] = shared_private_link_resources
+ parameters['primary_user_assigned_identity'] = primary_user_assigned_identity
+ parameters['cosmos_db'] = {}
+ parameters['cosmos_db']['collections_throughput'] = collections_throughput
+ parameters['encryption'] = {}
+ parameters['encryption']['status'] = status
+ parameters['encryption']['identity'] = identity
+ parameters['encryption']['key_vault_properties'] = key_vault_properties
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_update(client,
+ resource_group_name,
+ workspace_name,
+ tags=None,
+ sku=None,
+ description=None,
+ friendly_name=None,
+ image_build_compute=None,
+ primary_user_assigned_identity=None,
+ public_network_access=None,
+ collections_throughput=None,
+ type_=None,
+ user_assigned_identities=None):
+ parameters = {}
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['description'] = description
+ parameters['friendly_name'] = friendly_name
+ parameters['image_build_compute'] = image_build_compute
+ parameters['primary_user_assigned_identity'] = primary_user_assigned_identity
+ parameters['public_network_access'] = public_network_access
+ parameters['cosmos_db'] = {}
+ parameters['cosmos_db']['collections_throughput'] = collections_throughput
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ return client.update(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_delete(client,
+ resource_group_name,
+ workspace_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_diagnose(client,
+ resource_group_name,
+ workspace_name,
+ udr=None,
+ nsg=None,
+ resource_lock=None,
+ dns_resolution=None,
+ storage_account=None,
+ key_vault=None,
+ container_registry=None,
+ application_insights=None,
+ others=None,
+ no_wait=False):
+ parameters = {}
+ parameters['value'] = {}
+ parameters['value']['udr'] = udr
+ parameters['value']['nsg'] = nsg
+ parameters['value']['resource_lock'] = resource_lock
+ parameters['value']['dns_resolution'] = dns_resolution
+ parameters['value']['storage_account'] = storage_account
+ parameters['value']['key_vault'] = key_vault
+ parameters['value']['container_registry'] = container_registry
+ parameters['value']['application_insights'] = application_insights
+ parameters['value']['others'] = others
+ return sdk_no_wait(no_wait,
+ client.begin_diagnose,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_list_key(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_list_notebook_access_token(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_notebook_access_token(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_list_notebook_key(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_notebook_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_list_outbound_network_dependency_endpoint(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_outbound_network_dependencies_endpoints(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_list_storage_account_key(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_storage_account_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_prepare_notebook(client,
+ resource_group_name,
+ workspace_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_prepare_notebook,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_resync_key(client,
+ resource_group_name,
+ workspace_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_resync_keys,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_usage_list(client,
+ location):
+ return client.list(location=location)
+
+
+def machinelearningservices_virtual_machine_size_list(client,
+ location):
+ return client.list(location=location)
+
+
+def machinelearningservices_quota_list(client,
+ location):
+ return client.list(location=location)
+
+
+def machinelearningservices_quota_update(client,
+ location,
+ value=None,
+ quota_update_parameters_location=None):
+ parameters = {}
+ parameters['value'] = value
+ parameters['location'] = quota_update_parameters_location
+ return client.update(location=location,
+ parameters=parameters)
+
+
+def machinelearningservices_compute_list(client,
+ resource_group_name,
+ workspace_name,
+ skip=None):
+ return client.list(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ skip=skip)
+
+
+def machinelearningservices_compute_show(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_compute_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ properties=None,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ no_wait=False):
+ parameters = {}
+ parameters['properties'] = properties
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_compute_update(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ scale_settings=None,
+ no_wait=False):
+ parameters = {}
+ parameters['properties'] = {}
+ parameters['properties']['scale_settings'] = scale_settings
+ return sdk_no_wait(no_wait,
+ client.begin_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_compute_delete(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ underlying_resource_action,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ underlying_resource_action=underlying_resource_action)
+
+
+def machinelearningservices_compute_list_key(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.list_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_compute_list_node(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.list_nodes(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_compute_restart(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_restart,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_compute_start(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_start,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_compute_stop(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_stop,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_private_endpoint_connection_list(client,
+ resource_group_name,
+ workspace_name):
+ return client.list(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_private_endpoint_connection_show(client,
+ resource_group_name,
+ workspace_name,
+ private_endpoint_connection_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name)
+
+
+def machinelearningservices_private_endpoint_connection_create(client,
+ resource_group_name,
+ workspace_name,
+ private_endpoint_connection_name,
+ location=None,
+ tags=None,
+ sku=None,
+ private_link_service_connection_state=None,
+ type_=None,
+ user_assigned_identities=None):
+ properties = {}
+ properties['location'] = location
+ properties['tags'] = tags
+ properties['sku'] = sku
+ properties['private_link_service_connection_state'] = private_link_service_connection_state
+ properties['identity'] = {}
+ properties['identity']['type'] = type_
+ properties['identity']['user_assigned_identities'] = user_assigned_identities
+ return client.create_or_update(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ properties=properties)
+
+
+def machinelearningservices_private_endpoint_connection_update(instance,
+ resource_group_name,
+ workspace_name,
+ private_endpoint_connection_name,
+ location=None,
+ tags=None,
+ sku=None,
+ private_link_service_connection_state=None,
+ type_=None,
+ user_assigned_identities=None):
+ if location is not None:
+ instance.location = location
+ if tags is not None:
+ instance.tags = tags
+ if sku is not None:
+ instance.sku = sku
+ if private_link_service_connection_state is not None:
+ instance.private_link_service_connection_state = private_link_service_connection_state
+ if type_ is not None:
+ instance.identity.type = type_
+ if user_assigned_identities is not None:
+ instance.identity.user_assigned_identities = user_assigned_identities
+ return instance
+
+
+def machinelearningservices_private_endpoint_connection_delete(client,
+ resource_group_name,
+ workspace_name,
+ private_endpoint_connection_name):
+ return client.delete(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name)
+
+
+def machinelearningservices_private_link_resource_list(client,
+ resource_group_name,
+ workspace_name):
+ return client.list(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_connection_list(client,
+ resource_group_name,
+ workspace_name,
+ target=None,
+ category=None):
+ return client.list(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ target=target,
+ category=category)
+
+
+def machinelearningservices_workspace_connection_show(client,
+ resource_group_name,
+ workspace_name,
+ connection_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ connection_name=connection_name)
+
+
+def machinelearningservices_workspace_connection_create(client,
+ resource_group_name,
+ workspace_name,
+ connection_name,
+ category=None,
+ target=None,
+ auth_type=None,
+ value=None):
+ parameters = {}
+ parameters['category'] = category
+ parameters['target'] = target
+ parameters['auth_type'] = auth_type
+ parameters['value'] = value
+ parameters['value_format'] = "JSON"
+ return client.create(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ connection_name=connection_name,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_connection_delete(client,
+ resource_group_name,
+ workspace_name,
+ connection_name):
+ return client.delete(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ connection_name=connection_name)
+
+
+def machinelearningservices_workspace_feature_list(client,
+ resource_group_name,
+ workspace_name):
+ return client.list(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_sku_list(client):
+ return client.list()
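
Because each custom function above only shapes a `parameters` dict and delegates to the SDK client, it can be exercised in isolation. The following is a minimal, hypothetical check (not part of the generated sources) using unittest.mock; it assumes azure-cli-core is installed so that custom.py imports cleanly, and the connection values are illustrative only.

```
# Hypothetical check of machinelearningservices_workspace_connection_create with a mocked client.
from unittest import mock

from azext_machinelearningservices.generated.custom import (
    machinelearningservices_workspace_connection_create,
)

client = mock.Mock()
machinelearningservices_workspace_connection_create(
    client,
    resource_group_name='workspace-1234',
    workspace_name='testworkspace',
    connection_name='connection-1',
    category='ContainerRegistry',      # illustrative values
    target='example-target',
    auth_type='PAT',
    value='{"token": "example"}')

# The custom function forwards a fully populated parameters dict, always adding value_format.
client.create.assert_called_once_with(
    resource_group_name='workspace-1234',
    workspace_name='testworkspace',
    connection_name='connection-1',
    parameters={
        'category': 'ContainerRegistry',
        'target': 'example-target',
        'auth_type': 'PAT',
        'value': '{"token": "example"}',
        'value_format': 'JSON',
    })
```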
diff --git a/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py
new file mode 100644
index 00000000000..70488e93851
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py
@@ -0,0 +1,116 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+import inspect
+import logging
+import os
+import sys
+import traceback
+import datetime as dt
+
+from azure.core.exceptions import AzureError
+from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError
+
+
+logger = logging.getLogger('azure.cli.testsdk')
+logger.addHandler(logging.StreamHandler())
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+exceptions = []
+test_map = dict()
+SUCCEEDED = "succeeded"
+FAILED = "failed"
+
+
+def try_manual(func):
+ def import_manual_function(origin_func):
+ from importlib import import_module
+ decorated_path = inspect.getfile(origin_func).lower()
+ module_path = __path__[0].lower()
+ if not decorated_path.startswith(module_path):
+ raise Exception("Decorator can only be used in submodules!")
+ manual_path = os.path.join(
+ decorated_path[module_path.rfind(os.path.sep) + 1:])
+ manual_file_path, manual_file_name = os.path.split(manual_path)
+ module_name, _ = os.path.splitext(manual_file_name)
+ manual_module = "..manual." + \
+ ".".join(manual_file_path.split(os.path.sep) + [module_name, ])
+ return getattr(import_module(manual_module, package=__name__), origin_func.__name__)
+
+ def get_func_to_call():
+ func_to_call = func
+ try:
+ func_to_call = import_manual_function(func)
+ logger.info("Found manual override for %s(...)", func.__name__)
+ except (ImportError, AttributeError):
+ pass
+ return func_to_call
+
+ def wrapper(*args, **kwargs):
+ func_to_call = get_func_to_call()
+ logger.info("running %s()...", func.__name__)
+ try:
+ test_map[func.__name__] = dict()
+ test_map[func.__name__]["result"] = SUCCESSED
+ test_map[func.__name__]["error_message"] = ""
+ test_map[func.__name__]["error_stack"] = ""
+ test_map[func.__name__]["error_normalized"] = ""
+ test_map[func.__name__]["start_dt"] = dt.datetime.utcnow()
+ ret = func_to_call(*args, **kwargs)
+ except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit,
+ JMESPathCheckAssertionError) as e:
+ use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE")
+ if use_exception_cache is None or use_exception_cache.lower() != "true":
+ raise
+ test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+ test_map[func.__name__]["result"] = FAILED
+ test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500]
+ test_map[func.__name__]["error_stack"] = traceback.format_exc().replace(
+ "\r\n", " ").replace("\n", " ")[:500]
+ logger.info("--------------------------------------")
+ logger.info("step exception: %s", e)
+ logger.error("--------------------------------------")
+ logger.error("step exception in %s: %s", func.__name__, e)
+ logger.info(traceback.format_exc())
+ exceptions.append((func.__name__, sys.exc_info()))
+ else:
+ test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+ return ret
+
+ if inspect.isclass(func):
+ return get_func_to_call()
+ return wrapper
+
+
+def calc_coverage(filename):
+ filename = filename.split(".")[0]
+ coverage_name = filename + "_coverage.md"
+ with open(coverage_name, "w") as f:
+ f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n")
+ total = len(test_map)
+ covered = 0
+ for k, v in test_map.items():
+ if not k.startswith("step_"):
+ total -= 1
+ continue
+ if v["result"] == SUCCESSED:
+ covered += 1
+ f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|"
+ "{end_dt}|\n".format(step_name=k, **v))
+ f.write("Coverage: {}/{}\n".format(covered, total))
+ print("Create coverage\n", file=sys.stderr)
+
+
+def raise_if():
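+    """Re-raise the first exception cached by ``try_manual``, if any.
+
+    Exceptions are only cached when ``TEST_EXCEPTION_CACHE=true``; when several
+    steps failed, the messages of the later failures are appended to the first
+    exception before it is re-raised with its original traceback.
+    """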
+ if exceptions:
+ if len(exceptions) <= 1:
+ raise exceptions[0][1][1]
+ message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1]))
+ message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]])
+ raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2])
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py
new file mode 100644
index 00000000000..772a5b21cd1
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py
@@ -0,0 +1,577 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+from .. import try_manual
+
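+# Placeholders such as {rg}, {rg_3}, {subscription_id}, {sa} and {myWorkspace}
+# in the commands below are resolved from the ScenarioTest kwargs (populated by
+# the resource preparers and the test class), while doubled braces {{ }} escape
+# literal JSON braces in --properties payloads.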
+
+# EXAMPLE: /Workspaces/put/Create Workspace
+@try_manual
+def step_workspace_create(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace create '
+ '--identity type="SystemAssigned,UserAssigned" userAssignedIdentities={{"/subscriptions/00000000-1111-2222'
+ '-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentiti'
+ 'es/testuai":{{}}}} '
+ '--location "eastus2euap" '
+ '--description "test description" '
+ '--application-insights "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.insights'
+ '/components/testinsights" '
+ '--container-registry "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.ContainerR'
+ 'egistry/registries/testRegistry" '
+ '--identity user-assigned-identity="/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microso'
+ 'ft.ManagedIdentity/userAssignedIdentities/testuai" '
+ '--key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/'
+ 'aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/{subscription_id}/resourceGroups/{rg}/'
+ 'providers/Microsoft.KeyVault/vaults/testkv" '
+ '--status "Enabled" '
+ '--friendly-name "HelloName" '
+ '--hbi-workspace false '
+ '--key-vault "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vaults/tes'
+ 'tkv" '
+ '--shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/{subscript'
+ 'ion_id}/resourceGroups/{rg}/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkRes'
+ 'ources/{myPrivateLinkResource}" group-id="{myPrivateLinkResource}" request-message="Please approve" '
+ 'status="Approved" '
+ '--storage-account "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storage/sto'
+ 'rageAccounts/{sa}" '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=[])
+ test.cmd('az machinelearningservices workspace wait --created '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspace
+@try_manual
+def step_workspace_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace show '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspaces by Resource Group
+@try_manual
+def step_workspace_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspaces by subscription
+@try_manual
+def step_workspace_list2(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list '
+ '-g ""',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/ListOutboundNetworkDependenciesEndpoints
+@try_manual
+def step_workspace_list_outbound(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list-outbound-network-dependency-endpoint '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/patch/Update Workspace
+@try_manual
+def step_workspace_update(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace update '
+ '--description "new description" '
+ '--friendly-name "New friendly name" '
+ '--public-network-access "Disabled" '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/Diagnose Workspace
+@try_manual
+def step_workspace_diagnose(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace diagnose '
+ '--application-insights "{{}}" '
+ '--container-registry "{{}}" '
+ '--dns-resolution "{{}}" '
+ '--key-vault "{{}}" '
+ '--nsg "{{}}" '
+ '--others "{{}}" '
+ '--resource-lock "{{}}" '
+ '--storage-account "{{}}" '
+ '--udr "{{}}" '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/List Workspace Keys
+@try_manual
+def step_workspace_list_key(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list-key '
+ '--resource-group "{rg_3}" '
+ '--name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/Prepare Notebook
+@try_manual
+def step_workspace_prepare_notebook(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace prepare-notebook '
+ '--resource-group "{rg_3}" '
+ '--name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/Resync Workspace Keys
+@try_manual
+def step_workspace_resync_key(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace resync-key '
+ '--resource-group "{rg_3}" '
+ '--name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/put/Attach a Kubernetes Compute
+@try_manual
+def step_compute_create(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute create '
+ '--name "{myCompute}" '
+ '--location "eastus" '
+ '--properties "{{\\"description\\":\\"some compute\\",\\"computeType\\":\\"Kubernetes\\",\\"properties\\":'
+ '{{\\"defaultInstanceType\\":\\"defaultInstanceType\\",\\"instanceTypes\\":{{\\"defaultInstanceType\\":{{'
+ '\\"nodeSelector\\":null,\\"resources\\":{{\\"limits\\":{{\\"cpu\\":\\"1\\",\\"memory\\":\\"4Gi\\",\\"nvid'
+ 'ia.com/gpu\\":null}},\\"requests\\":{{\\"cpu\\":\\"1\\",\\"memory\\":\\"4Gi\\",\\"nvidia.com/gpu\\":null}'
+ '}}}}}}},\\"namespace\\":\\"default\\"}},\\"resourceId\\":\\"/subscriptions/{subscription_id}/resourcegrou'
+ 'ps/{rg_3}/providers/Microsoft.ContainerService/managedClusters/compute123-56826-c9b00420020b2\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create a AML Compute
+@try_manual
+def step_compute_create2(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute create '
+ '--name "{myCompute}" '
+ '--location "eastus" '
+ '--properties "{{\\"computeType\\":\\"AmlCompute\\",\\"properties\\":{{\\"enableNodePublicIp\\":true,\\"is'
+ 'olatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\'
+ '"scaleSettings\\":{{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\'
+ '"}},\\"virtualMachineImage\\":{{\\"id\\":\\"/subscriptions/{subscription_id}/resourceGroups/{rg_5}/provid'
+ 'ers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"}},\\"vmPriority'
+ '\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create a DataFactory Compute
+@try_manual
+def step_compute_create3(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute create '
+ '--name "{myCompute}" '
+ '--location "eastus" '
+ '--properties "{{\\"computeType\\":\\"DataFactory\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create an AKS Compute
+@try_manual
+def step_compute_create4(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute create '
+ '--name "{myCompute}" '
+ '--location "eastus" '
+ '--properties "{{\\"computeType\\":\\"AKS\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create an ComputeInstance Compute
+@try_manual
+def step_compute_create5(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute create '
+ '--name "{myCompute}" '
+ '--location "eastus" '
+ '--properties "{{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{{\\"applicationSharingPolicy\\"'
+ ':\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings'
+ '\\":{{\\"assignedUser\\":{{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00'
+ '000000-0000-0000-0000-000000000000\\"}}}},\\"sshSettings\\":{{\\"sshPublicAccess\\":\\"Disabled\\"}},\\"s'
+ 'ubnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/put/Create an ComputeInstance Compute with minimal inputs
+@try_manual
+def step_compute_create6(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute create '
+ '--name "{myCompute}" '
+ '--location "eastus" '
+ '--properties "{{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{{\\"vmSize\\":\\"STANDARD_NC6\\'
+ '"}}}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/get/Get a AKS Compute
+@try_manual
+def step_compute_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute show '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/get/Get a AML Compute
+@try_manual
+def step_compute_show2(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ return step_compute_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /Compute/get/Get a Kubernetes Compute
+@try_manual
+def step_compute_show3(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ return step_compute_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /Compute/get/Get an ComputeInstance
+@try_manual
+def step_compute_show4(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ return step_compute_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /Compute/get/Get Computes
+@try_manual
+def step_compute_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/patch/Update a AmlCompute Compute
+@try_manual
+def step_compute_update(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute update '
+ '--name "{myCompute}" '
+ '--scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/Get compute nodes information for a compute
+@try_manual
+def step_compute_list_node(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute list-node '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/List AKS Compute Keys
+@try_manual
+def step_compute_list_key(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute list-key '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/Restart ComputeInstance Compute
+@try_manual
+def step_compute_restart(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute restart '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/Start ComputeInstance Compute
+@try_manual
+def step_compute_start(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute start '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/post/Stop ComputeInstance Compute
+@try_manual
+def step_compute_stop(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute stop '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Compute/delete/Delete Compute
+@try_manual
+def step_compute_delete(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices compute delete -y '
+ '--name "{myCompute}" '
+ '--resource-group "{rg_3}" '
+ '--underlying-resource-action "Delete" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/put/WorkspacePutPrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_create(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection create '
+ '--name "{myPrivateEndpointConnection}" '
+ '--private-link-service-connection-state description="Auto-Approved" status="Approved" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/get/StorageAccountListPrivateEndpointConnections
+@try_manual
+def step_private_endpoint_connection_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection list '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/get/WorkspaceGetPrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection show '
+ '--name "{myPrivateEndpointConnection}" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/delete/WorkspaceDeletePrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_delete(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection delete -y '
+ '--name "{myPrivateEndpointConnection}" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateLinkResources/get/WorkspaceListPrivateLinkResources
+@try_manual
+def step_private_link_resource_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-link-resource list '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Quotas/get/List workspace quotas by VMFamily
+@try_manual
+def step_quota_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices quota list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /Quotas/post/update quotas
+@try_manual
+def step_quota_update(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices quota update '
+ '--location "eastus" '
+ '--value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/{subscription_id}/r'
+ 'esourceGroups/{rg_4}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace3}/quotas/{myQuot'
+ 'a}" limit=100 unit="Count" '
+ '--value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/{subscription_id}/r'
+ 'esourceGroups/{rg_4}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace4}/quotas/{myQuot'
+ 'a}" limit=200 unit="Count"',
+ checks=checks)
+
+
+# EXAMPLE: /Usages/get/List Usages
+@try_manual
+def step_usage_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices usage list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /VirtualMachineSizes/get/List VM Sizes
+@try_manual
+def step_virtual_machine_size_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices virtual-machine-size list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/put/CreateWorkspaceConnection
+@try_manual
+def step_workspace_connection_create(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection create '
+ '--connection-name "connection-1" '
+ '--auth-type "PAT" '
+ '--category "ACR" '
+ '--target "www.facebook.com" '
+ '--value "secrets" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/get/GetWorkspaceConnection
+@try_manual
+def step_workspace_connection_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection show '
+ '--connection-name "connection-1" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/get/ListWorkspaceConnections
+@try_manual
+def step_workspace_connection_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection list '
+ '--category "ACR" '
+ '--resource-group "{rg_7}" '
+ '--target "www.facebook.com" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/delete/DeleteWorkspaceConnection
+@try_manual
+def step_workspace_connection_delete(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection delete -y '
+ '--connection-name "connection-1" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceFeatures/get/List Workspace features
+@try_manual
+def step_workspace_feature_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-feature list '
+ '--resource-group "{rg_5}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/delete/Delete Workspace
+@try_manual
+def step_workspace_delete(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace delete -y '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceSkus/get/List Skus
+@try_manual
+def step_workspace_sku_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-sku list',
+ checks=checks)
+
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py
new file mode 100644
index 00000000000..c0193dd61e2
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py
@@ -0,0 +1,279 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+import os
+from azure.cli.testsdk import ScenarioTest
+from azure.cli.testsdk import ResourceGroupPreparer
+from azure.cli.testsdk import StorageAccountPreparer
+from .example_steps import step_workspace_create
+from .example_steps import step_workspace_show
+from .example_steps import step_workspace_list
+from .example_steps import step_workspace_list2
+from .example_steps import step_workspace_list_outbound
+from .example_steps import step_workspace_update
+from .example_steps import step_workspace_diagnose
+from .example_steps import step_workspace_list_key
+from .example_steps import step_workspace_prepare_notebook
+from .example_steps import step_workspace_resync_key
+from .example_steps import step_compute_create
+from .example_steps import step_compute_create2
+from .example_steps import step_compute_create3
+from .example_steps import step_compute_create4
+from .example_steps import step_compute_create5
+from .example_steps import step_compute_create6
+from .example_steps import step_compute_show
+from .example_steps import step_compute_show2
+from .example_steps import step_compute_show3
+from .example_steps import step_compute_show4
+from .example_steps import step_compute_list
+from .example_steps import step_compute_update
+from .example_steps import step_compute_list_node
+from .example_steps import step_compute_list_key
+from .example_steps import step_compute_restart
+from .example_steps import step_compute_start
+from .example_steps import step_compute_stop
+from .example_steps import step_compute_delete
+from .example_steps import step_private_endpoint_connection_create
+from .example_steps import step_private_endpoint_connection_list
+from .example_steps import step_private_endpoint_connection_show
+from .example_steps import step_private_endpoint_connection_delete
+from .example_steps import step_private_link_resource_list
+from .example_steps import step_quota_list
+from .example_steps import step_quota_update
+from .example_steps import step_usage_list
+from .example_steps import step_virtual_machine_size_list
+from .example_steps import step_workspace_connection_create
+from .example_steps import step_workspace_connection_show
+from .example_steps import step_workspace_connection_list
+from .example_steps import step_workspace_connection_delete
+from .example_steps import step_workspace_feature_list
+from .example_steps import step_workspace_delete
+from .example_steps import step_workspace_sku_list
+from .. import (
+ try_manual,
+ raise_if,
+ calc_coverage
+)
+
+
+TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
+
+
+# Env setup_scenario
+@try_manual
+def setup_scenario(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7):
+ pass
+
+
+# Env cleanup_scenario
+@try_manual
+def cleanup_scenario(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7):
+ pass
+
+
+# Testcase: Scenario
+@try_manual
+def call_scenario(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7):
+ setup_scenario(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7)
+ step_workspace_create(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("encryption.identity.userAssignedIdentity", "/subscriptions/{subscription_id}/resourceGroups/{rg}/pr"
+ "oviders/Microsoft.ManagedIdentity/userAssignedIdentities/testuai", case_sensitive=False),
+ test.check("location", "eastus2euap", case_sensitive=False),
+ test.check("description", "test description", case_sensitive=False),
+ test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+ "ights/components/testinsights", case_sensitive=False),
+ test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+ "inerRegistry/registries/testRegistry", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey/aabbccdd"
+ "ee112233445566778899aabb", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGroups/{rg}"
+ "/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False),
+ test.check("encryption.status", "Enabled", case_sensitive=False),
+ test.check("friendlyName", "HelloName", case_sensitive=False),
+ test.check("hbiWorkspace", False),
+ test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+ "s/testkv", case_sensitive=False),
+ test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+ "e/storageAccounts/{sa}", case_sensitive=False),
+ test.check("name", "{myWorkspace}", case_sensitive=False),
+ ])
+ step_workspace_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("encryption.identity.userAssignedIdentity", "/subscriptions/{subscription_id}/resourceGroups/{rg}/pr"
+ "oviders/Microsoft.ManagedIdentity/userAssignedIdentities/testuai", case_sensitive=False),
+ test.check("location", "eastus2euap", case_sensitive=False),
+ test.check("description", "test description", case_sensitive=False),
+ test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+ "ights/components/testinsights", case_sensitive=False),
+ test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+ "inerRegistry/registries/testRegistry", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey/aabbccdd"
+ "ee112233445566778899aabb", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGroups/{rg}"
+ "/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False),
+ test.check("encryption.status", "Enabled", case_sensitive=False),
+ test.check("friendlyName", "HelloName", case_sensitive=False),
+ test.check("hbiWorkspace", False),
+ test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+ "s/testkv", case_sensitive=False),
+ test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+ "e/storageAccounts/{sa}", case_sensitive=False),
+ test.check("name", "{myWorkspace}", case_sensitive=False),
+ ])
+ step_workspace_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_workspace_list2(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check('length(@)', 2),
+ ])
+ step_workspace_list_outbound(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_update(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("location", "eastus2euap", case_sensitive=False),
+ test.check("description", "new description", case_sensitive=False),
+ test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+ "ights/components/testinsights", case_sensitive=False),
+ test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+ "inerRegistry/registries/testRegistry", case_sensitive=False),
+ test.check("friendlyName", "New friendly name", case_sensitive=False),
+ test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+ "s/testkv", case_sensitive=False),
+ test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+ "e/storageAccounts/{sa}", case_sensitive=False),
+ test.check("name", "{myWorkspace}", case_sensitive=False),
+ test.check("publicNetworkAccess", "Disabled", case_sensitive=False),
+ ])
+ step_workspace_diagnose(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_list_key(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_list_key(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_list_key(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_list_key(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_prepare_notebook(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_resync_key(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_compute_create(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ test.check("location", "eastus", case_sensitive=False),
+ ])
+ step_compute_create2(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ test.check("location", "eastus", case_sensitive=False),
+ ])
+ step_compute_create3(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ test.check("location", "eastus", case_sensitive=False),
+ ])
+ step_compute_create4(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ test.check("location", "eastus", case_sensitive=False),
+ ])
+ step_compute_create5(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ test.check("location", "eastus", case_sensitive=False),
+ ])
+ step_compute_create6(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ test.check("location", "eastus", case_sensitive=False),
+ ])
+ step_compute_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ test.check("location", "eastus", case_sensitive=False),
+ ])
+ step_compute_show2(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ ])
+ step_compute_show3(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ test.check("location", "eastus", case_sensitive=False),
+ ])
+ step_compute_show4(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ ])
+ step_compute_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_compute_update(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myCompute}", case_sensitive=False),
+ ])
+ step_compute_list_node(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_compute_list_key(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_compute_restart(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_compute_start(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_compute_stop(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_compute_delete(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_private_endpoint_connection_create(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myPrivateEndpointConnection}", case_sensitive=False),
+ ])
+ step_private_endpoint_connection_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_private_endpoint_connection_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[
+ test.check("name", "{myPrivateEndpointConnection}", case_sensitive=False),
+ ])
+ step_private_endpoint_connection_delete(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_private_link_resource_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_quota_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_quota_update(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_usage_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_virtual_machine_size_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_connection_create(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_connection_show(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_connection_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_connection_delete(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_feature_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_delete(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ step_workspace_sku_list(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7, checks=[])
+ cleanup_scenario(test, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7)
+
+
+# Test class for Scenario
+@try_manual
+class MachinelearningservicesScenarioTest(ScenarioTest):
+
+ def __init__(self, *args, **kwargs):
+ super(MachinelearningservicesScenarioTest, self).__init__(*args, **kwargs)
+ self.kwargs.update({
+ 'subscription_id': self.get_subscription_id()
+ })
+
+ self.kwargs.update({
+ 'myWorkspace6': 'default',
+ 'myPrivateLinkResource2': 'default',
+ 'myWorkspace3': 'demo_workspace1',
+ 'myWorkspace4': 'demo_workspace2',
+ 'myWorkspace': 'testworkspace',
+ 'myWorkspace2': 'workspaces123',
+ 'myWorkspace5': 'workspace-1',
+ 'myQuota': 'Standard_DSv2_Family_Cluster_Dedicated_vCPUs',
+ 'myCompute': 'compute123',
+ 'myPrivateEndpointConnection': '{privateEndpointConnectionName}',
+ 'myPrivateLinkResource': 'Sql',
+ })
+
+
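+    # Each preparer below provisions a live resource and publishes its name into
+    # self.kwargs under the given key ('rg', 'rg_2', ..., 'sa'), which is how the
+    # {rg}/{sa}-style placeholders in example_steps.py are resolved.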
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_workspace-1234'[:7], key='rg',
+ parameter_name='rg')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_accountcrud-1234'[:7], key='rg_2',
+ parameter_name='rg_2')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg'[:7], key='rg_4', parameter_name='rg_4')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_testrg123'[:7], key='rg_3',
+ parameter_name='rg_3')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_myResourceGroup'[:7], key='rg_5',
+ parameter_name='rg_5')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg-1234'[:7], key='rg_6',
+ parameter_name='rg_6')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_resourceGroup-1'[:7], key='rg_7',
+ parameter_name='rg_7')
+ @StorageAccountPreparer(name_prefix='clitestmachinelearningservices_testStorageAccount'[:7], key='sa',
+ resource_group_parameter_name='rg_2')
+ def test_machinelearningservices_Scenario(self, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7):
+ call_scenario(self, rg, rg_2, rg_4, rg_3, rg_5, rg_6, rg_7)
+ calc_coverage(__file__)
+ raise_if()
+
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py
new file mode 100644
index 00000000000..dad2c6eeb01
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py
@@ -0,0 +1,16 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
+
+try:
+ from ._patch import patch_sdk # type: ignore
+ patch_sdk()
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000000..1e98d12fc55
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py
@@ -0,0 +1,119 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.mgmt.core import ARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Optional
+
+ from azure.core.credentials import TokenCredential
+
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import Operations
+from .operations import WorkspacesOperations
+from .operations import UsagesOperations
+from .operations import VirtualMachineSizesOperations
+from .operations import QuotasOperations
+from .operations import ComputeOperations
+from .operations import PrivateEndpointConnectionsOperations
+from .operations import PrivateLinkResourcesOperations
+from .operations import WorkspaceConnectionsOperations
+from .operations import WorkspaceFeaturesOperations
+from .operations import WorkspaceSkusOperations
+from . import models
+
+
+class AzureMachineLearningWorkspaces(object):
+ """These APIs allow end users to operate on Azure Machine Learning Workspace resources.
+
+ :ivar operations: Operations operations
+ :vartype operations: azure_machine_learning_workspaces.operations.Operations
+ :ivar workspaces: WorkspacesOperations operations
+ :vartype workspaces: azure_machine_learning_workspaces.operations.WorkspacesOperations
+ :ivar usages: UsagesOperations operations
+ :vartype usages: azure_machine_learning_workspaces.operations.UsagesOperations
+ :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
+ :vartype virtual_machine_sizes: azure_machine_learning_workspaces.operations.VirtualMachineSizesOperations
+ :ivar quotas: QuotasOperations operations
+ :vartype quotas: azure_machine_learning_workspaces.operations.QuotasOperations
+ :ivar compute: ComputeOperations operations
+ :vartype compute: azure_machine_learning_workspaces.operations.ComputeOperations
+ :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
+ :vartype private_endpoint_connections: azure_machine_learning_workspaces.operations.PrivateEndpointConnectionsOperations
+ :ivar private_link_resources: PrivateLinkResourcesOperations operations
+ :vartype private_link_resources: azure_machine_learning_workspaces.operations.PrivateLinkResourcesOperations
+ :ivar workspace_connections: WorkspaceConnectionsOperations operations
+ :vartype workspace_connections: azure_machine_learning_workspaces.operations.WorkspaceConnectionsOperations
+ :ivar workspace_features: WorkspaceFeaturesOperations operations
+ :vartype workspace_features: azure_machine_learning_workspaces.operations.WorkspaceFeaturesOperations
+ :ivar workspace_skus: WorkspaceSkusOperations operations
+ :vartype workspace_skus: azure_machine_learning_workspaces.operations.WorkspaceSkusOperations
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials.TokenCredential
+ :param subscription_id: The ID of the target subscription.
+ :type subscription_id: str
+ :param str base_url: Service URL
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
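+
+    A minimal construction sketch (``DefaultAzureCredential`` is an assumed
+    example; any ``~azure.core.credentials.TokenCredential`` works)::
+
+        from azure.identity import DefaultAzureCredential
+
+        with AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>") as client:
+            ...  # e.g. client.workspaces, client.compute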
+ """
+
+ def __init__(
+ self,
+ credential, # type: "TokenCredential"
+ subscription_id, # type: str
+ base_url=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ if not base_url:
+ base_url = 'https://management.azure.com'
+ self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
+ self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+
+ self.operations = Operations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspaces = WorkspacesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.usages = UsagesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.virtual_machine_sizes = VirtualMachineSizesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.quotas = QuotasOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.compute = ComputeOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_link_resources = PrivateLinkResourcesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_connections = WorkspaceConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_features = WorkspaceFeaturesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_skus = WorkspaceSkusOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ def close(self):
+ # type: () -> None
+ self._client.close()
+
+ def __enter__(self):
+ # type: () -> AzureMachineLearningWorkspaces
+ self._client.__enter__()
+ return self
+
+ def __exit__(self, *exc_details):
+ # type: (Any) -> None
+ self._client.__exit__(*exc_details)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py
new file mode 100644
index 00000000000..3cfe1a3eb6a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+ from azure.core.credentials import TokenCredential
+
+VERSION = "unknown"
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+ """Configuration for AzureMachineLearningWorkspaces.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials.TokenCredential
+ :param subscription_id: The ID of the target subscription.
+ :type subscription_id: str
+ """
+
+ def __init__(
+ self,
+ credential, # type: "TokenCredential"
+ subscription_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ if credential is None:
+ raise ValueError("Parameter 'credential' must not be None.")
+ if subscription_id is None:
+ raise ValueError("Parameter 'subscription_id' must not be None.")
+ super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+
+ self.credential = credential
+ self.subscription_id = subscription_id
+ self.api_version = "2021-07-01"
+ self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+ kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get('authentication_policy')
+ if self.credential and not self.authentication_policy:
+ self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py
new file mode 100644
index 00000000000..872474577c4
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py
@@ -0,0 +1,10 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000000..c97a061b14a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py
@@ -0,0 +1,113 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Optional, TYPE_CHECKING
+
+from azure.mgmt.core import AsyncARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import Operations
+from .operations import WorkspacesOperations
+from .operations import UsagesOperations
+from .operations import VirtualMachineSizesOperations
+from .operations import QuotasOperations
+from .operations import ComputeOperations
+from .operations import PrivateEndpointConnectionsOperations
+from .operations import PrivateLinkResourcesOperations
+from .operations import WorkspaceConnectionsOperations
+from .operations import WorkspaceFeaturesOperations
+from .operations import WorkspaceSkusOperations
+from .. import models
+
+
+class AzureMachineLearningWorkspaces(object):
+ """These APIs allow end users to operate on Azure Machine Learning Workspace resources.
+
+ :ivar operations: Operations operations
+ :vartype operations: azure_machine_learning_workspaces.aio.operations.Operations
+ :ivar workspaces: WorkspacesOperations operations
+ :vartype workspaces: azure_machine_learning_workspaces.aio.operations.WorkspacesOperations
+ :ivar usages: UsagesOperations operations
+ :vartype usages: azure_machine_learning_workspaces.aio.operations.UsagesOperations
+ :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
+ :vartype virtual_machine_sizes: azure_machine_learning_workspaces.aio.operations.VirtualMachineSizesOperations
+ :ivar quotas: QuotasOperations operations
+ :vartype quotas: azure_machine_learning_workspaces.aio.operations.QuotasOperations
+ :ivar compute: ComputeOperations operations
+ :vartype compute: azure_machine_learning_workspaces.aio.operations.ComputeOperations
+ :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
+ :vartype private_endpoint_connections: azure_machine_learning_workspaces.aio.operations.PrivateEndpointConnectionsOperations
+ :ivar private_link_resources: PrivateLinkResourcesOperations operations
+ :vartype private_link_resources: azure_machine_learning_workspaces.aio.operations.PrivateLinkResourcesOperations
+ :ivar workspace_connections: WorkspaceConnectionsOperations operations
+ :vartype workspace_connections: azure_machine_learning_workspaces.aio.operations.WorkspaceConnectionsOperations
+ :ivar workspace_features: WorkspaceFeaturesOperations operations
+ :vartype workspace_features: azure_machine_learning_workspaces.aio.operations.WorkspaceFeaturesOperations
+ :ivar workspace_skus: WorkspaceSkusOperations operations
+ :vartype workspace_skus: azure_machine_learning_workspaces.aio.operations.WorkspaceSkusOperations
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+ :param subscription_id: The ID of the target subscription.
+ :type subscription_id: str
+ :param str base_url: Service URL
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
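+
+    A minimal async construction sketch (``DefaultAzureCredential`` from
+    ``azure.identity.aio`` is an assumed example)::
+
+        from azure.identity.aio import DefaultAzureCredential
+
+        async with AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>") as client:
+            ...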
+ """
+
+ def __init__(
+ self,
+ credential: "AsyncTokenCredential",
+ subscription_id: str,
+ base_url: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ if not base_url:
+ base_url = 'https://management.azure.com'
+ self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
+ self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+
+ self.operations = Operations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspaces = WorkspacesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.usages = UsagesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.virtual_machine_sizes = VirtualMachineSizesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.quotas = QuotasOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.compute = ComputeOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_link_resources = PrivateLinkResourcesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_connections = WorkspaceConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_features = WorkspaceFeaturesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_skus = WorkspaceSkusOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ async def close(self) -> None:
+ await self._client.close()
+
+ async def __aenter__(self) -> "AzureMachineLearningWorkspaces":
+ await self._client.__aenter__()
+ return self
+
+ async def __aexit__(self, *exc_details) -> None:
+ await self._client.__aexit__(*exc_details)
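
A minimal usage sketch for the async client added above (not part of the generated diff). It assumes `azure-identity` is installed for the async credential, that the package's `aio` `__init__` re-exports `AzureMachineLearningWorkspaces` as generated SDKs typically do, and it reuses the placeholder resource names from this extension's README.

```python
# Hedged sketch: drive the async client as a context manager so the pipeline is
# closed on exit. Subscription ID, resource group and workspace are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,  # assumed to be re-exported by the aio package
)


async def main() -> None:
    credential = DefaultAzureCredential()
    async with AzureMachineLearningWorkspaces(
        credential, "00000000-1111-2222-3333-444444444444"
    ) as client:
        # Page through the computes of a (hypothetical) workspace.
        async for compute in client.compute.list("workspace-1234", "testworkspace"):
            print(compute.name)
    await credential.close()


asyncio.run(main())
```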
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py
new file mode 100644
index 00000000000..7f093ae21bc
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+VERSION = "unknown"
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+ """Configuration for AzureMachineLearningWorkspaces.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+ :param subscription_id: The ID of the target subscription.
+ :type subscription_id: str
+ """
+
+ def __init__(
+ self,
+ credential: "AsyncTokenCredential",
+ subscription_id: str,
+ **kwargs: Any
+ ) -> None:
+ if credential is None:
+ raise ValueError("Parameter 'credential' must not be None.")
+ if subscription_id is None:
+ raise ValueError("Parameter 'subscription_id' must not be None.")
+ super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+
+ self.credential = credential
+ self.subscription_id = subscription_id
+ self.api_version = "2021-07-01"
+ self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+ kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self,
+ **kwargs: Any
+ ) -> None:
+ self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get('authentication_policy')
+ if self.credential and not self.authentication_policy:
+ self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
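
Because `_configure` only falls back to the default policies when a kwarg is absent, pipeline behavior can be tuned by passing policy objects (or `credential_scopes`) through the client constructor. A hedged sketch, again using a placeholder subscription ID and assuming the `aio` package re-exports the client:

```python
# Hedged sketch: constructor kwargs flow into AzureMachineLearningWorkspacesConfiguration
# and are picked up by _configure(), so a custom retry policy or credential scope
# overrides the generated defaults.
from azure.core.pipeline import policies
from azure.identity.aio import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,
)

client = AzureMachineLearningWorkspaces(
    DefaultAzureCredential(),
    "00000000-1111-2222-3333-444444444444",  # placeholder subscription ID
    retry_policy=policies.AsyncRetryPolicy(retry_total=3),
    credential_scopes=["https://management.azure.com/.default"],
)
```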
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py
new file mode 100644
index 00000000000..44c7bf6aeeb
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py
@@ -0,0 +1,33 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import Operations
+from ._workspaces_operations import WorkspacesOperations
+from ._usages_operations import UsagesOperations
+from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations
+from ._quotas_operations import QuotasOperations
+from ._compute_operations import ComputeOperations
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
+from ._private_link_resources_operations import PrivateLinkResourcesOperations
+from ._workspace_connections_operations import WorkspaceConnectionsOperations
+from ._workspace_features_operations import WorkspaceFeaturesOperations
+from ._workspace_skus_operations import WorkspaceSkusOperations
+
+__all__ = [
+ 'Operations',
+ 'WorkspacesOperations',
+ 'UsagesOperations',
+ 'VirtualMachineSizesOperations',
+ 'QuotasOperations',
+ 'ComputeOperations',
+ 'PrivateEndpointConnectionsOperations',
+ 'PrivateLinkResourcesOperations',
+ 'WorkspaceConnectionsOperations',
+ 'WorkspaceFeaturesOperations',
+ 'WorkspaceSkusOperations',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_compute_operations.py
new file mode 100644
index 00000000000..a8532f610ae
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_compute_operations.py
@@ -0,0 +1,1082 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class ComputeOperations:
+ """ComputeOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ skip: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.PaginatedComputeResourcesList"]:
+ """Gets computes in specified workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedComputeResourcesList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> "models.ComputeResource":
+ """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are
+ not returned - use 'keys' nested resource to get them.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ComputeResource",
+ **kwargs
+ ) -> "models.ComputeResource":
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ComputeResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 200:
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ComputeResource",
+ **kwargs
+ ) -> AsyncLROPoller["models.ComputeResource"]:
+ """Creates or updates compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify
+ that it does not exist yet.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Payload with Machine Learning compute definition.
+ :type parameters: ~azure_machine_learning_workspaces.models.ComputeResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ response_headers = {}
+ response = pipeline_response.http_response
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def _update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ClusterUpdateParameters",
+ **kwargs
+ ) -> "models.ComputeResource":
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ClusterUpdateParameters",
+ **kwargs
+ ) -> AsyncLROPoller["models.ComputeResource"]:
+ """Updates properties of a compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Additional parameters for cluster update.
+ :type parameters: ~azure_machine_learning_workspaces.models.ClusterUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ underlying_resource_action: Union[str, "models.UnderlyingResourceAction"],
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def begin_delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ underlying_resource_action: Union[str, "models.UnderlyingResourceAction"],
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Deletes specified Machine Learning compute.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+        :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the
+         underlying compute from the workspace if 'Detach'.
+ :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ underlying_resource_action=underlying_resource_action,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def list_nodes(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> AsyncIterable["models.AmlComputeNodesInformation"]:
+ """Get the details (e.g IP address, port etc) of all the compute nodes in the compute.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either AmlComputeNodesInformation or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.AmlComputeNodesInformation]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.AmlComputeNodesInformation"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_nodes.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response)
+ list_of_elem = deserialized.nodes
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore
+
+ async def list_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> "models.ComputeSecrets":
+ """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc).
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeSecrets, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeSecrets"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeSecrets', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore
+
+ async def _start_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._start_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ async def begin_start(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Posts a start action to a compute instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._start_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ async def _stop_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._stop_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ async def begin_stop(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Posts a stop action to a compute instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._stop_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ async def _restart_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._restart_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
+
+ async def begin_restart(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Posts a restart action to a compute instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._restart_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
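
The compute operation group returns `AsyncLROPoller` objects for the long-running start/stop/restart and create/update/delete calls, and `AsyncItemPaged` for `list`/`list_nodes`. A hedged sketch of awaiting one LRO and paging node details; `client` is assumed to be an already-constructed `AzureMachineLearningWorkspaces` aio client, and the compute name is hypothetical:

```python
# Hedged sketch: start a compute instance, wait for the LRO, then page its nodes.
# Resource names are placeholders consistent with the README examples.
async def start_and_inspect(client) -> None:
    poller = await client.compute.begin_start(
        "workspace-1234", "testworkspace", "testcompute"  # hypothetical compute name
    )
    await poller.result()  # completes when the Azure-AsyncOperation reports success

    # list_nodes returns an AsyncItemPaged of per-node details (IP address, port, state, ...).
    async for node in client.compute.list_nodes(
        "workspace-1234", "testworkspace", "testcompute"
    ):
        print(node)
```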
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py
new file mode 100644
index 00000000000..9ac2aaed005
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py
@@ -0,0 +1,105 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class Operations:
+ """Operations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ **kwargs
+ ) -> AsyncIterable["models.OperationListResult"]:
+ """Lists all of the available Azure Machine Learning Workspaces REST API operations.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either OperationListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.OperationListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('OperationListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore
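As a usage sketch for the pager above, assuming a `client` built as in the earlier compute example (the `operations` attribute name is inferred from the generated class name):
```
async def print_available_operations(client) -> None:
    # list() returns an AsyncItemPaged and is not awaited directly;
    # the yielded items come from OperationListResult.value.
    async for operation in client.operations.list():
        print(operation.name)
```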
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py
new file mode 100644
index 00000000000..864e9f5f288
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py
@@ -0,0 +1,314 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateEndpointConnectionsOperations:
+ """PrivateEndpointConnectionsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncIterable["models.PrivateEndpointConnectionListResult"]:
+ """List all the private endpoint connections associated with the workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PrivateEndpointConnectionListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('PrivateEndpointConnectionListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections'} # type: ignore
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ **kwargs
+ ) -> "models.PrivateEndpointConnection":
+ """Gets the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ async def create_or_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ properties: "models.PrivateEndpointConnection",
+ **kwargs
+ ) -> "models.PrivateEndpointConnection":
+ """Update the state of specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :param properties: The private endpoint connection properties.
+ :type properties: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ async def delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ **kwargs
+ ) -> None:
+ """Deletes the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
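A hedged sketch of the get/delete round trip above, again assuming a pre-built `client`; the `private_endpoint_connections` attribute name is inferred from the generated class name:
```
async def inspect_and_remove_connection(client) -> None:
    connection = await client.private_endpoint_connections.get(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        private_endpoint_connection_name="testpec",
    )
    print(connection.name)

    # delete returns None; HTTP 200 and 204 are both treated as success.
    await client.private_endpoint_connections.delete(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        private_endpoint_connection_name="testpec",
    )
```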
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py
new file mode 100644
index 00000000000..2bd410c7798
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py
@@ -0,0 +1,99 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateLinkResourcesOperations:
+ """PrivateLinkResourcesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.PrivateLinkResourceListResult":
+ """Gets the private link resources that need to be created for a workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateLinkResourceListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore
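Note that, unlike the pager-based operations, this `list` is a plain coroutine that returns the whole result in one response. A small sketch, where the operation-group attribute and the `value` field name are assumptions:
```
async def show_private_link_resources(client) -> None:
    result = await client.private_link_resources.list(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
    )
    for resource in result.value or []:
        print(resource.name)
```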
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py
new file mode 100644
index 00000000000..f269b0ae3ab
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py
@@ -0,0 +1,176 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class QuotasOperations:
+ """QuotasOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def update(
+ self,
+ location: str,
+ parameters: "models.QuotaUpdateParameters",
+ **kwargs
+ ) -> "models.UpdateWorkspaceQuotasResult":
+ """Update quota for each VM family in workspace.
+
+ :param location: The location for which the quota update is queried.
+ :type location: str
+ :param parameters: Quota update parameters.
+ :type parameters: ~azure_machine_learning_workspaces.models.QuotaUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: UpdateWorkspaceQuotasResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.UpdateWorkspaceQuotasResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'QuotaUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore
+
+ def list(
+ self,
+ location: str,
+ **kwargs
+ ) -> AsyncIterable["models.ListWorkspaceQuotas"]:
+ """Gets the currently assigned Workspace Quotas based on VMFamily.
+
+ :param location: The location for which quotas are queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceQuotas"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas'} # type: ignore
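A sketch of exercising both sides of this operation group, assuming a pre-built `client` and the vendored `models` module; the `QuotaUpdateParameters`/`QuotaBaseProperties` field names are assumptions based on the 2021-07-01 schema:
```
async def show_and_update_quotas(client, models) -> None:
    # The pager yields the elements of ListWorkspaceQuotas.value for the location.
    async for quota in client.quotas.list(location="eastus"):
        print(quota)

    # The update side posts a QuotaUpdateParameters body in a single call.
    update = models.QuotaUpdateParameters(
        value=[
            models.QuotaBaseProperties(
                id="<vm-family-quota-resource-id>",
                type="Microsoft.MachineLearningServices/vmFamily",
                limit=100,
                unit="Count",
            )
        ]
    )
    result = await client.quotas.update(location="eastus", parameters=update)
    print(result.value)
```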
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py
new file mode 100644
index 00000000000..01ba06853ce
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py
@@ -0,0 +1,113 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations:
+ """UsagesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location: str,
+ **kwargs
+ ) -> AsyncIterable["models.ListUsagesResult"]:
+ """Gets the current usage information as well as limits for AML resources for given subscription
+ and location.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListUsagesResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListUsagesResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore
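Usage mirrors the quotas pager; a short sketch assuming a pre-built `client`:
```
async def show_usages(client) -> None:
    # Items come from ListUsagesResult.value; next_link paging is handled automatically.
    async for usage in client.usages.list(location="eastus"):
        print(usage)
```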
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py
new file mode 100644
index 00000000000..6da006f1a61
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class VirtualMachineSizesOperations:
+ """VirtualMachineSizesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def list(
+ self,
+ location: str,
+ **kwargs
+ ) -> "models.VirtualMachineSizeListResult":
+ """Returns supported VM Sizes in a location.
+
+ :param location: The location for which virtual machine sizes are queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: VirtualMachineSizeListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineSizeListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore
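Like the private link resources call, this is a single awaited request; a sketch assuming the response exposes the sizes through a `value` list:
```
async def show_vm_sizes(client) -> None:
    result = await client.virtual_machine_sizes.list(location="eastus")
    for size in result.value or []:
        print(size.name)
```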
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py
new file mode 100644
index 00000000000..46d63999e8c
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py
@@ -0,0 +1,321 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceConnectionsOperations:
+ """WorkspaceConnectionsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ target: Optional[str] = None,
+ category: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.PaginatedWorkspaceConnectionsList"]:
+ """List all connections under a AML workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedWorkspaceConnectionsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if target is not None:
+ query_parameters['target'] = self._serialize.query("target", target, 'str')
+ if category is not None:
+ query_parameters['category'] = self._serialize.query("category", category, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore
+
+ async def create(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ connection_name: str,
+ parameters: "models.WorkspaceConnection",
+ **kwargs
+ ) -> "models.WorkspaceConnection":
+ """Add a new workspace connection.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :param parameters: The object for creating or updating a new workspace connection.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceConnection')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ connection_name: str,
+ **kwargs
+ ) -> "models.WorkspaceConnection":
+ """Get the detail of a workspace connection.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ async def delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ connection_name: str,
+ **kwargs
+ ) -> None:
+ """Delete a workspace connection.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
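Finally, a sketch of creating and then filtering workspace connections; the `WorkspaceConnection` field names (category, target, auth_type, value) are assumptions taken from the 2021-07-01 schema, and the operation-group attribute name is inferred from the generated class name:
```
async def manage_workspace_connection(client, models) -> None:
    parameters = models.WorkspaceConnection(
        category="ContainerRegistry",
        target="<registry-url>",
        auth_type="PAT",
        value="<secret-value>",
    )
    created = await client.workspace_connections.create(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        connection_name="testconnection",
        parameters=parameters,
    )
    print(created.name)

    # The optional target/category arguments map straight onto query parameters.
    async for connection in client.workspace_connections.list(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        category="ContainerRegistry",
    ):
        print(connection.name)
```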
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py
new file mode 100644
index 00000000000..fe9434c8c74
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceFeaturesOperations:
+ """WorkspaceFeaturesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncIterable["models.ListAmlUserFeatureResult"]:
+ """Lists all enabled features for a workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListAmlUserFeatureResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore
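+
+    # Usage sketch (illustrative only, not generated code): `list` returns an
+    # AsyncItemPaged that yields the individual feature entries from each
+    # page's `value`, so it is consumed with `async for`. The client variable
+    # and the `workspace_features` attribute name are assumptions:
+    #
+    #     async for feature in client.workspace_features.list(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #     ):
+    #         print(feature)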
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_skus_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_skus_operations.py
new file mode 100644
index 00000000000..ad1565e23cb
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_skus_operations.py
@@ -0,0 +1,109 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceSkusOperations:
+ """WorkspaceSkusOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ **kwargs
+ ) -> AsyncIterable["models.SkuListResult"]:
+ """Lists all skus with associated features.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either SkuListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.SkuListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SkuListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('SkuListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore
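+
+    # Usage sketch (illustrative only, not generated code): this operation is
+    # subscription-scoped and takes no required arguments, so the pager can be
+    # collected with an async comprehension (the client variable and the
+    # `workspace_skus` attribute name are assumptions):
+    #
+    #     skus = [sku async for sku in client.workspace_skus.list()]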
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py
new file mode 100644
index 00000000000..408a8b24f7c
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py
@@ -0,0 +1,1217 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspacesOperations:
+ """WorkspacesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.Workspace":
+ """Gets the properties of the specified machine learning workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
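+
+    # Usage sketch (illustrative only, not generated code): `get` returns a
+    # deserialized models.Workspace, so standard ARM resource fields such as
+    # `name` and `location` can be read from the result (the client variable
+    # and the `workspaces` attribute name are assumptions):
+    #
+    #     workspace = await client.workspaces.get("workspace-1234", "testworkspace")
+    #     print(workspace.name, workspace.location)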
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.Workspace",
+ **kwargs
+ ) -> Optional["models.Workspace"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'Workspace')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.Workspace",
+ **kwargs
+ ) -> AsyncLROPoller["models.Workspace"]:
+ """Creates or updates a workspace with the specified parameters.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for creating or updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.Workspace
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.Workspace]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
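+
+    # Usage sketch (illustrative only, not generated code): the begin_* methods
+    # are coroutines that return an AsyncLROPoller, so the call itself is
+    # awaited to start the operation and `poller.result()` is awaited for the
+    # final Workspace. `workspace_params` stands in for a models.Workspace
+    # instance; the client variable and `workspaces` attribute are assumptions:
+    #
+    #     poller = await client.workspaces.begin_create_or_update(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #         parameters=workspace_params,
+    #     )
+    #     workspace = await poller.result()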
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def begin_delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Deletes a machine learning workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.WorkspaceUpdateParameters",
+ **kwargs
+ ) -> "models.Workspace":
+ """Updates a machine learning workspace with the specified parameters.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def list_by_resource_group(
+ self,
+ resource_group_name: str,
+ skip: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.WorkspaceListResult"]:
+ """Lists all the available machine learning workspaces under the specified resource group.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_resource_group.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
+
+ async def _diagnose_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: Optional["models.DiagnoseWorkspaceParameters"] = None,
+ **kwargs
+ ) -> Optional["models.DiagnoseResponseResult"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DiagnoseResponseResult"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._diagnose_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if parameters is not None:
+ body_content = self._serialize.body(parameters, 'DiagnoseWorkspaceParameters')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response)
+
+ if response.status_code == 202:
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+ response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _diagnose_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose'} # type: ignore
+
+ async def begin_diagnose(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: Optional["models.DiagnoseWorkspaceParameters"] = None,
+ **kwargs
+ ) -> AsyncLROPoller["models.DiagnoseResponseResult"]:
+ """Diagnose workspace setup issue.
+
+ Diagnose workspace setup issue.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+        :param parameters: The parameters for diagnosing workspace health.
+ :type parameters: ~azure_machine_learning_workspaces.models.DiagnoseWorkspaceParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either DiagnoseResponseResult or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.DiagnoseResponseResult]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.DiagnoseResponseResult"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._diagnose_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_diagnose.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose'} # type: ignore
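+
+    # Usage sketch (illustrative only, not generated code): `parameters` is
+    # optional, so a default diagnosis can be started without a request body
+    # and the DiagnoseResponseResult retrieved from the poller (the client
+    # variable and `workspaces` attribute name are assumptions):
+    #
+    #     poller = await client.workspaces.begin_diagnose(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #     )
+    #     diagnosis = await poller.result()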
+
+ async def list_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListWorkspaceKeysResult":
+ """Lists all the keys associated with this workspace. This includes keys for the storage account,
+        Application Insights, and the password for the container registry.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListWorkspaceKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore
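+
+    # Usage sketch (illustrative only, not generated code): despite the `list_`
+    # prefix this is a single POST call, not a pager, so the result is a
+    # ListWorkspaceKeysResult obtained with a plain await (the client variable
+    # and `workspaces` attribute name are assumptions):
+    #
+    #     keys = await client.workspaces.list_keys("workspace-1234", "testworkspace")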
+
+ async def _resync_keys_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._resync_keys_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _resync_keys_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
+
+ async def begin_resync_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Resync all the keys associated with this workspace. This includes keys for the storage account,
+        Application Insights, and the password for the container registry.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._resync_keys_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
+
+ def list_by_subscription(
+ self,
+ skip: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.WorkspaceListResult"]:
+ """Lists all the available machine learning workspaces under the specified subscription.
+
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_subscription.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
+
+ async def list_notebook_access_token(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.NotebookAccessTokenResult":
+ """return notebook access token and refresh token.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: NotebookAccessTokenResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.NotebookAccessTokenResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookAccessTokenResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_notebook_access_token.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('NotebookAccessTokenResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_notebook_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken'} # type: ignore
+
+ async def _prepare_notebook_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> Optional["models.NotebookResourceInfo"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResourceInfo"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._prepare_notebook_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _prepare_notebook_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ async def begin_prepare_notebook(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller["models.NotebookResourceInfo"]:
+ """Prepare a notebook.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either NotebookResourceInfo or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResourceInfo"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._prepare_notebook_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_prepare_notebook.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
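+
+    # Illustrative usage (not generated code): a minimal sketch of driving this
+    # long-running operation, assuming "ops" is an instance of this operations
+    # class obtained from an authenticated async client.
+    #
+    #     async def prepare_notebook(ops):
+    #         poller = await ops.begin_prepare_notebook(
+    #             resource_group_name="workspace-1234",
+    #             workspace_name="testworkspace",
+    #         )
+    #         return await poller.result()  # models.NotebookResourceInfo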
+
+ async def list_storage_account_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListStorageAccountKeysResult":
+ """List storage account keys of a workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListStorageAccountKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListStorageAccountKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListStorageAccountKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_storage_account_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListStorageAccountKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_storage_account_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys'} # type: ignore
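+
+    # Illustrative usage (not generated code): a minimal sketch, assuming "ops" is
+    # an instance of this operations class on an authenticated async client.
+    #
+    #     keys_result = await ops.list_storage_account_keys(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #     )  # models.ListStorageAccountKeysResult; its properties are populated by the service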
+
+ async def list_notebook_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListNotebookKeysResult":
+ """List keys of a notebook.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListNotebookKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListNotebookKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_notebook_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_notebook_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys'} # type: ignore
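+
+    # Illustrative usage (not generated code): same calling pattern as above,
+    # returning a models.ListNotebookKeysResult with server-populated key properties.
+    #
+    #     notebook_keys = await ops.list_notebook_keys(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #     )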
+
+ async def list_outbound_network_dependencies_endpoints(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ExternalFqdnResponse":
+ """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) programmatically.
+
+ Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs)
+ programmatically.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ExternalFqdnResponse, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ExternalFqdnResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ExternalFqdnResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_outbound_network_dependencies_endpoints.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ExternalFqdnResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints'} # type: ignore
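+
+    # Illustrative usage (not generated code): a minimal sketch, assuming "ops" is
+    # an instance of this operations class; the exact shape of the returned
+    # models.ExternalFqdnResponse is defined in the models package, not here.
+    #
+    #     fqdn_response = await ops.list_outbound_network_dependencies_endpoints(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #     )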
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py
new file mode 100644
index 00000000000..2cd7754f2f5
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py
@@ -0,0 +1,463 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+try:
+ from ._models_py3 import Aks
+ from ._models_py3 import AksComputeSecrets
+ from ._models_py3 import AksComputeSecretsProperties
+ from ._models_py3 import AksNetworkingConfiguration
+ from ._models_py3 import AksProperties
+ from ._models_py3 import AmlCompute
+ from ._models_py3 import AmlComputeNodeInformation
+ from ._models_py3 import AmlComputeNodesInformation
+ from ._models_py3 import AmlComputeProperties
+ from ._models_py3 import AmlUserFeature
+ from ._models_py3 import AssignedUser
+ from ._models_py3 import AutoPauseProperties
+ from ._models_py3 import AutoScaleProperties
+ from ._models_py3 import ClusterUpdateParameters
+ from ._models_py3 import Components1D3SwueSchemasComputeresourceAllof1
+ from ._models_py3 import Compute
+ from ._models_py3 import ComputeInstance
+ from ._models_py3 import ComputeInstanceApplication
+ from ._models_py3 import ComputeInstanceConnectivityEndpoints
+ from ._models_py3 import ComputeInstanceCreatedBy
+ from ._models_py3 import ComputeInstanceLastOperation
+ from ._models_py3 import ComputeInstanceProperties
+ from ._models_py3 import ComputeInstanceSshSettings
+ from ._models_py3 import ComputeResource
+ from ._models_py3 import ComputeSecrets
+ from ._models_py3 import ContainerResourceRequirements
+ from ._models_py3 import CosmosDbSettings
+ from ._models_py3 import DataFactory
+ from ._models_py3 import DataLakeAnalytics
+ from ._models_py3 import DataLakeAnalyticsProperties
+ from ._models_py3 import Databricks
+ from ._models_py3 import DatabricksComputeSecrets
+ from ._models_py3 import DatabricksComputeSecretsProperties
+ from ._models_py3 import DatabricksProperties
+ from ._models_py3 import DiagnoseRequestProperties
+ from ._models_py3 import DiagnoseResponseResult
+ from ._models_py3 import DiagnoseResponseResultValue
+ from ._models_py3 import DiagnoseResult
+ from ._models_py3 import DiagnoseWorkspaceParameters
+ from ._models_py3 import EncryptionProperty
+ from ._models_py3 import ErrorAdditionalInfo
+ from ._models_py3 import ErrorDetail
+ from ._models_py3 import ErrorResponse
+ from ._models_py3 import EstimatedVmPrice
+ from ._models_py3 import EstimatedVmPrices
+ from ._models_py3 import ExternalFqdnResponse
+ from ._models_py3 import FqdnEndpoint
+ from ._models_py3 import FqdnEndpointDetail
+ from ._models_py3 import FqdnEndpoints
+ from ._models_py3 import FqdnEndpointsProperties
+ from ._models_py3 import HdInsight
+ from ._models_py3 import HdInsightProperties
+ from ._models_py3 import Identity
+ from ._models_py3 import IdentityForCmk
+ from ._models_py3 import InstanceTypeSchema
+ from ._models_py3 import InstanceTypeSchemaResources
+ from ._models_py3 import KeyVaultProperties
+ from ._models_py3 import Kubernetes
+ from ._models_py3 import KubernetesProperties
+ from ._models_py3 import KubernetesSchema
+ from ._models_py3 import ListAmlUserFeatureResult
+ from ._models_py3 import ListNotebookKeysResult
+ from ._models_py3 import ListStorageAccountKeysResult
+ from ._models_py3 import ListUsagesResult
+ from ._models_py3 import ListWorkspaceKeysResult
+ from ._models_py3 import ListWorkspaceQuotas
+ from ._models_py3 import NodeStateCounts
+ from ._models_py3 import NotebookAccessTokenResult
+ from ._models_py3 import NotebookPreparationError
+ from ._models_py3 import NotebookResourceInfo
+ from ._models_py3 import Operation
+ from ._models_py3 import OperationDisplay
+ from ._models_py3 import OperationListResult
+ from ._models_py3 import PaginatedComputeResourcesList
+ from ._models_py3 import PaginatedWorkspaceConnectionsList
+ from ._models_py3 import Password
+ from ._models_py3 import PersonalComputeInstanceSettings
+ from ._models_py3 import PrivateEndpoint
+ from ._models_py3 import PrivateEndpointConnection
+ from ._models_py3 import PrivateEndpointConnectionListResult
+ from ._models_py3 import PrivateLinkResource
+ from ._models_py3 import PrivateLinkResourceListResult
+ from ._models_py3 import PrivateLinkServiceConnectionState
+ from ._models_py3 import QuotaBaseProperties
+ from ._models_py3 import QuotaUpdateParameters
+ from ._models_py3 import RegistryListCredentialsResult
+ from ._models_py3 import Resource
+ from ._models_py3 import ResourceId
+ from ._models_py3 import ResourceName
+ from ._models_py3 import ResourceQuota
+ from ._models_py3 import ResourceSkuLocationInfo
+ from ._models_py3 import ResourceSkuZoneDetails
+ from ._models_py3 import Restriction
+ from ._models_py3 import ScaleSettings
+ from ._models_py3 import ScaleSettingsInformation
+ from ._models_py3 import ScriptReference
+ from ._models_py3 import ScriptsToExecute
+ from ._models_py3 import ServiceManagedResourcesSettings
+ from ._models_py3 import ServicePrincipalCredentials
+ from ._models_py3 import SetupScripts
+ from ._models_py3 import SharedPrivateLinkResource
+ from ._models_py3 import Sku
+ from ._models_py3 import SkuCapability
+ from ._models_py3 import SkuListResult
+ from ._models_py3 import SslConfiguration
+ from ._models_py3 import SynapseSpark
+ from ._models_py3 import SynapseSparkProperties
+ from ._models_py3 import SystemData
+ from ._models_py3 import SystemService
+ from ._models_py3 import UpdateWorkspaceQuotas
+ from ._models_py3 import UpdateWorkspaceQuotasResult
+ from ._models_py3 import Usage
+ from ._models_py3 import UsageName
+ from ._models_py3 import UserAccountCredentials
+ from ._models_py3 import UserAssignedIdentity
+ from ._models_py3 import VirtualMachine
+ from ._models_py3 import VirtualMachineImage
+ from ._models_py3 import VirtualMachineProperties
+ from ._models_py3 import VirtualMachineSecrets
+ from ._models_py3 import VirtualMachineSize
+ from ._models_py3 import VirtualMachineSizeListResult
+ from ._models_py3 import VirtualMachineSshCredentials
+ from ._models_py3 import Workspace
+ from ._models_py3 import WorkspaceConnection
+ from ._models_py3 import WorkspaceListResult
+ from ._models_py3 import WorkspaceSku
+ from ._models_py3 import WorkspaceUpdateParameters
+except (SyntaxError, ImportError):
+ from ._models import Aks # type: ignore
+ from ._models import AksComputeSecrets # type: ignore
+ from ._models import AksComputeSecretsProperties # type: ignore
+ from ._models import AksNetworkingConfiguration # type: ignore
+ from ._models import AksProperties # type: ignore
+ from ._models import AmlCompute # type: ignore
+ from ._models import AmlComputeNodeInformation # type: ignore
+ from ._models import AmlComputeNodesInformation # type: ignore
+ from ._models import AmlComputeProperties # type: ignore
+ from ._models import AmlUserFeature # type: ignore
+ from ._models import AssignedUser # type: ignore
+ from ._models import AutoPauseProperties # type: ignore
+ from ._models import AutoScaleProperties # type: ignore
+ from ._models import ClusterUpdateParameters # type: ignore
+ from ._models import Components1D3SwueSchemasComputeresourceAllof1 # type: ignore
+ from ._models import Compute # type: ignore
+ from ._models import ComputeInstance # type: ignore
+ from ._models import ComputeInstanceApplication # type: ignore
+ from ._models import ComputeInstanceConnectivityEndpoints # type: ignore
+ from ._models import ComputeInstanceCreatedBy # type: ignore
+ from ._models import ComputeInstanceLastOperation # type: ignore
+ from ._models import ComputeInstanceProperties # type: ignore
+ from ._models import ComputeInstanceSshSettings # type: ignore
+ from ._models import ComputeResource # type: ignore
+ from ._models import ComputeSecrets # type: ignore
+ from ._models import ContainerResourceRequirements # type: ignore
+ from ._models import CosmosDbSettings # type: ignore
+ from ._models import DataFactory # type: ignore
+ from ._models import DataLakeAnalytics # type: ignore
+ from ._models import DataLakeAnalyticsProperties # type: ignore
+ from ._models import Databricks # type: ignore
+ from ._models import DatabricksComputeSecrets # type: ignore
+ from ._models import DatabricksComputeSecretsProperties # type: ignore
+ from ._models import DatabricksProperties # type: ignore
+ from ._models import DiagnoseRequestProperties # type: ignore
+ from ._models import DiagnoseResponseResult # type: ignore
+ from ._models import DiagnoseResponseResultValue # type: ignore
+ from ._models import DiagnoseResult # type: ignore
+ from ._models import DiagnoseWorkspaceParameters # type: ignore
+ from ._models import EncryptionProperty # type: ignore
+ from ._models import ErrorAdditionalInfo # type: ignore
+ from ._models import ErrorDetail # type: ignore
+ from ._models import ErrorResponse # type: ignore
+ from ._models import EstimatedVmPrice # type: ignore
+ from ._models import EstimatedVmPrices # type: ignore
+ from ._models import ExternalFqdnResponse # type: ignore
+ from ._models import FqdnEndpoint # type: ignore
+ from ._models import FqdnEndpointDetail # type: ignore
+ from ._models import FqdnEndpoints # type: ignore
+ from ._models import FqdnEndpointsProperties # type: ignore
+ from ._models import HdInsight # type: ignore
+ from ._models import HdInsightProperties # type: ignore
+ from ._models import Identity # type: ignore
+ from ._models import IdentityForCmk # type: ignore
+ from ._models import InstanceTypeSchema # type: ignore
+ from ._models import InstanceTypeSchemaResources # type: ignore
+ from ._models import KeyVaultProperties # type: ignore
+ from ._models import Kubernetes # type: ignore
+ from ._models import KubernetesProperties # type: ignore
+ from ._models import KubernetesSchema # type: ignore
+ from ._models import ListAmlUserFeatureResult # type: ignore
+ from ._models import ListNotebookKeysResult # type: ignore
+ from ._models import ListStorageAccountKeysResult # type: ignore
+ from ._models import ListUsagesResult # type: ignore
+ from ._models import ListWorkspaceKeysResult # type: ignore
+ from ._models import ListWorkspaceQuotas # type: ignore
+ from ._models import NodeStateCounts # type: ignore
+ from ._models import NotebookAccessTokenResult # type: ignore
+ from ._models import NotebookPreparationError # type: ignore
+ from ._models import NotebookResourceInfo # type: ignore
+ from ._models import Operation # type: ignore
+ from ._models import OperationDisplay # type: ignore
+ from ._models import OperationListResult # type: ignore
+ from ._models import PaginatedComputeResourcesList # type: ignore
+ from ._models import PaginatedWorkspaceConnectionsList # type: ignore
+ from ._models import Password # type: ignore
+ from ._models import PersonalComputeInstanceSettings # type: ignore
+ from ._models import PrivateEndpoint # type: ignore
+ from ._models import PrivateEndpointConnection # type: ignore
+ from ._models import PrivateEndpointConnectionListResult # type: ignore
+ from ._models import PrivateLinkResource # type: ignore
+ from ._models import PrivateLinkResourceListResult # type: ignore
+ from ._models import PrivateLinkServiceConnectionState # type: ignore
+ from ._models import QuotaBaseProperties # type: ignore
+ from ._models import QuotaUpdateParameters # type: ignore
+ from ._models import RegistryListCredentialsResult # type: ignore
+ from ._models import Resource # type: ignore
+ from ._models import ResourceId # type: ignore
+ from ._models import ResourceName # type: ignore
+ from ._models import ResourceQuota # type: ignore
+ from ._models import ResourceSkuLocationInfo # type: ignore
+ from ._models import ResourceSkuZoneDetails # type: ignore
+ from ._models import Restriction # type: ignore
+ from ._models import ScaleSettings # type: ignore
+ from ._models import ScaleSettingsInformation # type: ignore
+ from ._models import ScriptReference # type: ignore
+ from ._models import ScriptsToExecute # type: ignore
+ from ._models import ServiceManagedResourcesSettings # type: ignore
+ from ._models import ServicePrincipalCredentials # type: ignore
+ from ._models import SetupScripts # type: ignore
+ from ._models import SharedPrivateLinkResource # type: ignore
+ from ._models import Sku # type: ignore
+ from ._models import SkuCapability # type: ignore
+ from ._models import SkuListResult # type: ignore
+ from ._models import SslConfiguration # type: ignore
+ from ._models import SynapseSpark # type: ignore
+ from ._models import SynapseSparkProperties # type: ignore
+ from ._models import SystemData # type: ignore
+ from ._models import SystemService # type: ignore
+ from ._models import UpdateWorkspaceQuotas # type: ignore
+ from ._models import UpdateWorkspaceQuotasResult # type: ignore
+ from ._models import Usage # type: ignore
+ from ._models import UsageName # type: ignore
+ from ._models import UserAccountCredentials # type: ignore
+ from ._models import UserAssignedIdentity # type: ignore
+ from ._models import VirtualMachine # type: ignore
+ from ._models import VirtualMachineImage # type: ignore
+ from ._models import VirtualMachineProperties # type: ignore
+ from ._models import VirtualMachineSecrets # type: ignore
+ from ._models import VirtualMachineSize # type: ignore
+ from ._models import VirtualMachineSizeListResult # type: ignore
+ from ._models import VirtualMachineSshCredentials # type: ignore
+ from ._models import Workspace # type: ignore
+ from ._models import WorkspaceConnection # type: ignore
+ from ._models import WorkspaceListResult # type: ignore
+ from ._models import WorkspaceSku # type: ignore
+ from ._models import WorkspaceUpdateParameters # type: ignore
+
+from ._azure_machine_learning_workspaces_enums import (
+ AllocationState,
+ ApplicationSharingPolicy,
+ BillingCurrency,
+ ClusterPurpose,
+ ComputeInstanceAuthorizationType,
+ ComputeInstanceState,
+ ComputeType,
+ CreatedByType,
+ DiagnoseResultLevel,
+ EncryptionStatus,
+ LoadBalancerType,
+ NodeState,
+ OperationName,
+ OperationStatus,
+ OsType,
+ PrivateEndpointConnectionProvisioningState,
+ PrivateEndpointServiceConnectionStatus,
+ ProvisioningState,
+ PublicNetworkAccess,
+ QuotaUnit,
+ ReasonCode,
+ RemoteLoginPortPublicAccess,
+ ResourceIdentityType,
+ SshPublicAccess,
+ SslConfigurationStatus,
+ Status,
+ UnderlyingResourceAction,
+ UnitOfMeasure,
+ UsageUnit,
+ ValueFormat,
+ VmPriceOsType,
+ VmPriority,
+ VmTier,
+)
+
+__all__ = [
+ 'Aks',
+ 'AksComputeSecrets',
+ 'AksComputeSecretsProperties',
+ 'AksNetworkingConfiguration',
+ 'AksProperties',
+ 'AmlCompute',
+ 'AmlComputeNodeInformation',
+ 'AmlComputeNodesInformation',
+ 'AmlComputeProperties',
+ 'AmlUserFeature',
+ 'AssignedUser',
+ 'AutoPauseProperties',
+ 'AutoScaleProperties',
+ 'ClusterUpdateParameters',
+ 'Components1D3SwueSchemasComputeresourceAllof1',
+ 'Compute',
+ 'ComputeInstance',
+ 'ComputeInstanceApplication',
+ 'ComputeInstanceConnectivityEndpoints',
+ 'ComputeInstanceCreatedBy',
+ 'ComputeInstanceLastOperation',
+ 'ComputeInstanceProperties',
+ 'ComputeInstanceSshSettings',
+ 'ComputeResource',
+ 'ComputeSecrets',
+ 'ContainerResourceRequirements',
+ 'CosmosDbSettings',
+ 'DataFactory',
+ 'DataLakeAnalytics',
+ 'DataLakeAnalyticsProperties',
+ 'Databricks',
+ 'DatabricksComputeSecrets',
+ 'DatabricksComputeSecretsProperties',
+ 'DatabricksProperties',
+ 'DiagnoseRequestProperties',
+ 'DiagnoseResponseResult',
+ 'DiagnoseResponseResultValue',
+ 'DiagnoseResult',
+ 'DiagnoseWorkspaceParameters',
+ 'EncryptionProperty',
+ 'ErrorAdditionalInfo',
+ 'ErrorDetail',
+ 'ErrorResponse',
+ 'EstimatedVmPrice',
+ 'EstimatedVmPrices',
+ 'ExternalFqdnResponse',
+ 'FqdnEndpoint',
+ 'FqdnEndpointDetail',
+ 'FqdnEndpoints',
+ 'FqdnEndpointsProperties',
+ 'HdInsight',
+ 'HdInsightProperties',
+ 'Identity',
+ 'IdentityForCmk',
+ 'InstanceTypeSchema',
+ 'InstanceTypeSchemaResources',
+ 'KeyVaultProperties',
+ 'Kubernetes',
+ 'KubernetesProperties',
+ 'KubernetesSchema',
+ 'ListAmlUserFeatureResult',
+ 'ListNotebookKeysResult',
+ 'ListStorageAccountKeysResult',
+ 'ListUsagesResult',
+ 'ListWorkspaceKeysResult',
+ 'ListWorkspaceQuotas',
+ 'NodeStateCounts',
+ 'NotebookAccessTokenResult',
+ 'NotebookPreparationError',
+ 'NotebookResourceInfo',
+ 'Operation',
+ 'OperationDisplay',
+ 'OperationListResult',
+ 'PaginatedComputeResourcesList',
+ 'PaginatedWorkspaceConnectionsList',
+ 'Password',
+ 'PersonalComputeInstanceSettings',
+ 'PrivateEndpoint',
+ 'PrivateEndpointConnection',
+ 'PrivateEndpointConnectionListResult',
+ 'PrivateLinkResource',
+ 'PrivateLinkResourceListResult',
+ 'PrivateLinkServiceConnectionState',
+ 'QuotaBaseProperties',
+ 'QuotaUpdateParameters',
+ 'RegistryListCredentialsResult',
+ 'Resource',
+ 'ResourceId',
+ 'ResourceName',
+ 'ResourceQuota',
+ 'ResourceSkuLocationInfo',
+ 'ResourceSkuZoneDetails',
+ 'Restriction',
+ 'ScaleSettings',
+ 'ScaleSettingsInformation',
+ 'ScriptReference',
+ 'ScriptsToExecute',
+ 'ServiceManagedResourcesSettings',
+ 'ServicePrincipalCredentials',
+ 'SetupScripts',
+ 'SharedPrivateLinkResource',
+ 'Sku',
+ 'SkuCapability',
+ 'SkuListResult',
+ 'SslConfiguration',
+ 'SynapseSpark',
+ 'SynapseSparkProperties',
+ 'SystemData',
+ 'SystemService',
+ 'UpdateWorkspaceQuotas',
+ 'UpdateWorkspaceQuotasResult',
+ 'Usage',
+ 'UsageName',
+ 'UserAccountCredentials',
+ 'UserAssignedIdentity',
+ 'VirtualMachine',
+ 'VirtualMachineImage',
+ 'VirtualMachineProperties',
+ 'VirtualMachineSecrets',
+ 'VirtualMachineSize',
+ 'VirtualMachineSizeListResult',
+ 'VirtualMachineSshCredentials',
+ 'Workspace',
+ 'WorkspaceConnection',
+ 'WorkspaceListResult',
+ 'WorkspaceSku',
+ 'WorkspaceUpdateParameters',
+ 'AllocationState',
+ 'ApplicationSharingPolicy',
+ 'BillingCurrency',
+ 'ClusterPurpose',
+ 'ComputeInstanceAuthorizationType',
+ 'ComputeInstanceState',
+ 'ComputeType',
+ 'CreatedByType',
+ 'DiagnoseResultLevel',
+ 'EncryptionStatus',
+ 'LoadBalancerType',
+ 'NodeState',
+ 'OperationName',
+ 'OperationStatus',
+ 'OsType',
+ 'PrivateEndpointConnectionProvisioningState',
+ 'PrivateEndpointServiceConnectionStatus',
+ 'ProvisioningState',
+ 'PublicNetworkAccess',
+ 'QuotaUnit',
+ 'ReasonCode',
+ 'RemoteLoginPortPublicAccess',
+ 'ResourceIdentityType',
+ 'SshPublicAccess',
+ 'SslConfigurationStatus',
+ 'Status',
+ 'UnderlyingResourceAction',
+ 'UnitOfMeasure',
+ 'UsageUnit',
+ 'ValueFormat',
+ 'VmPriceOsType',
+ 'VmPriority',
+ 'VmTier',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py
new file mode 100644
index 00000000000..d19373ff219
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py
@@ -0,0 +1,326 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum, EnumMeta
+from six import with_metaclass
+
+class _CaseInsensitiveEnumMeta(EnumMeta):
+ def __getitem__(self, name):
+ return super().__getitem__(name.upper())
+
+ def __getattr__(cls, name):
+ """Return the enum member matching `name`
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+ """
+ try:
+ return cls._member_map_[name.upper()]
+ except KeyError:
+ raise AttributeError(name)
+
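+# Illustrative note (not generated code): thanks to the metaclass above, member
+# lookups on the enums defined below are case-insensitive, e.g. (assuming
+# AllocationState from later in this module):
+#
+#     AllocationState.STEADY == "Steady"          # members are also plain strings
+#     AllocationState["steady"] is AllocationState.STEADY
+#     getattr(AllocationState, "Steady") is AllocationState.STEADY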
+
+class AllocationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Allocation state of the compute. Possible values are: steady - Indicates that the compute is
+ not resizing. There are no changes to the number of compute nodes in the compute in progress. A
+ compute enters this state when it is created and when no operations are being performed on the
+ compute to change the number of compute nodes. resizing - Indicates that the compute is
+ resizing; that is, compute nodes are being added to or removed from the compute.
+ """
+
+ STEADY = "Steady"
+ RESIZING = "Resizing"
+
+class ApplicationSharingPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Policy for sharing applications on this compute instance among users of parent workspace. If
+ Personal, only the creator can access applications on this compute instance. When Shared, any
+ workspace user can access applications on this instance depending on his/her assigned role.
+ """
+
+ PERSONAL = "Personal"
+ SHARED = "Shared"
+
+class BillingCurrency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Three lettered code specifying the currency of the VM price. Example: USD
+ """
+
+ USD = "USD"
+
+class ClusterPurpose(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Intended usage of the cluster
+ """
+
+ FAST_PROD = "FastProd"
+ DENSE_PROD = "DenseProd"
+ DEV_TEST = "DevTest"
+
+class ComputeInstanceAuthorizationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The Compute Instance Authorization type. Available values are personal (default).
+ """
+
+ PERSONAL = "personal"
+
+class ComputeInstanceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Current state of an ComputeInstance.
+ """
+
+ CREATING = "Creating"
+ CREATE_FAILED = "CreateFailed"
+ DELETING = "Deleting"
+ RUNNING = "Running"
+ RESTARTING = "Restarting"
+ JOB_RUNNING = "JobRunning"
+ SETTING_UP = "SettingUp"
+ SETUP_FAILED = "SetupFailed"
+ STARTING = "Starting"
+ STOPPED = "Stopped"
+ STOPPING = "Stopping"
+ USER_SETTING_UP = "UserSettingUp"
+ USER_SETUP_FAILED = "UserSetupFailed"
+ UNKNOWN = "Unknown"
+ UNUSABLE = "Unusable"
+
+class ComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of compute
+ """
+
+ AKS = "AKS"
+ KUBERNETES = "Kubernetes"
+ AML_COMPUTE = "AmlCompute"
+ COMPUTE_INSTANCE = "ComputeInstance"
+ DATA_FACTORY = "DataFactory"
+ VIRTUAL_MACHINE = "VirtualMachine"
+ HD_INSIGHT = "HDInsight"
+ DATABRICKS = "Databricks"
+ DATA_LAKE_ANALYTICS = "DataLakeAnalytics"
+ SYNAPSE_SPARK = "SynapseSpark"
+
+class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of identity that created the resource.
+ """
+
+ USER = "User"
+ APPLICATION = "Application"
+ MANAGED_IDENTITY = "ManagedIdentity"
+ KEY = "Key"
+
+class DiagnoseResultLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Level of workspace setup error
+ """
+
+ WARNING = "Warning"
+ ERROR = "Error"
+ INFORMATION = "Information"
+
+class EncryptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Indicates whether or not the encryption is enabled for the workspace.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class LoadBalancerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Load Balancer Type
+ """
+
+ PUBLIC_IP = "PublicIp"
+ INTERNAL_LOAD_BALANCER = "InternalLoadBalancer"
+
+class NodeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the compute node. Values are idle, running, preparing, unusable, leaving and
+ preempted.
+ """
+
+ IDLE = "idle"
+ RUNNING = "running"
+ PREPARING = "preparing"
+ UNUSABLE = "unusable"
+ LEAVING = "leaving"
+ PREEMPTED = "preempted"
+
+class OperationName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Name of the last operation.
+ """
+
+ CREATE = "Create"
+ START = "Start"
+ STOP = "Stop"
+ RESTART = "Restart"
+ REIMAGE = "Reimage"
+ DELETE = "Delete"
+
+class OperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Operation status.
+ """
+
+ IN_PROGRESS = "InProgress"
+ SUCCEEDED = "Succeeded"
+ CREATE_FAILED = "CreateFailed"
+ START_FAILED = "StartFailed"
+ STOP_FAILED = "StopFailed"
+ RESTART_FAILED = "RestartFailed"
+ REIMAGE_FAILED = "ReimageFailed"
+ DELETE_FAILED = "DeleteFailed"
+
+class OsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Compute OS Type
+ """
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current provisioning state.
+ """
+
+ SUCCEEDED = "Succeeded"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ FAILED = "Failed"
+
+class PrivateEndpointServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The private endpoint connection status.
+ """
+
+ PENDING = "Pending"
+ APPROVED = "Approved"
+ REJECTED = "Rejected"
+ DISCONNECTED = "Disconnected"
+ TIMEOUT = "Timeout"
+
+class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current deployment state of workspace resource. The provisioningState is to indicate states
+ for resource provisioning.
+ """
+
+ UNKNOWN = "Unknown"
+ UPDATING = "Updating"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+
+class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Whether requests from Public Network are allowed.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class QuotaUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """An enum describing the unit of quota measurement.
+ """
+
+ COUNT = "Count"
+
+class ReasonCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The reason for the restriction.
+ """
+
+ NOT_SPECIFIED = "NotSpecified"
+ NOT_AVAILABLE_FOR_REGION = "NotAvailableForRegion"
+ NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription"
+
+class RemoteLoginPortPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh
+ port is closed on all nodes of the cluster. Enabled - Indicates that the public ssh port is
+ open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed
+    on all nodes of the cluster if a VNet is defined, else is open on all public nodes. It can be
+    NotSpecified only at cluster creation time; after creation it will be either enabled or
+    disabled.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+ NOT_SPECIFIED = "NotSpecified"
+
+class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The identity type.
+ """
+
+ SYSTEM_ASSIGNED = "SystemAssigned"
+ SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+ USER_ASSIGNED = "UserAssigned"
+ NONE = "None"
+
+class SshPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh
+ port is closed on this instance. Enabled - Indicates that the public ssh port is open and
+ accessible according to the VNet/subnet policy if applicable.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class SslConfigurationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Enable or disable ssl for scoring
+ """
+
+ DISABLED = "Disabled"
+ ENABLED = "Enabled"
+ AUTO = "Auto"
+
+class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Status of update workspace quota.
+ """
+
+ UNDEFINED = "Undefined"
+ SUCCESS = "Success"
+ FAILURE = "Failure"
+ INVALID_QUOTA_BELOW_CLUSTER_MINIMUM = "InvalidQuotaBelowClusterMinimum"
+ INVALID_QUOTA_EXCEEDS_SUBSCRIPTION_LIMIT = "InvalidQuotaExceedsSubscriptionLimit"
+ INVALID_VM_FAMILY_NAME = "InvalidVMFamilyName"
+ OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku"
+ OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion"
+
+class UnderlyingResourceAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ DELETE = "Delete"
+ DETACH = "Detach"
+
+class UnitOfMeasure(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The unit of time measurement for the specified VM price. Example: OneHour
+ """
+
+ ONE_HOUR = "OneHour"
+
+class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """An enum describing the unit of usage measurement.
+ """
+
+ COUNT = "Count"
+
+class ValueFormat(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """format for the workspace connection value
+ """
+
+ JSON = "JSON"
+
+class VmPriceOsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Operating system type used by the VM.
+ """
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class VmPriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Virtual Machine priority
+ """
+
+ DEDICATED = "Dedicated"
+ LOW_PRIORITY = "LowPriority"
+
+class VmTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of the VM.
+ """
+
+ STANDARD = "Standard"
+ LOW_PRIORITY = "LowPriority"
+ SPOT = "Spot"
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
new file mode 100644
index 00000000000..533f23d4034
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
@@ -0,0 +1,4906 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class Compute(msrest.serialization.Model):
+ """Machine Learning compute object.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, Kubernetes, SynapseSpark, VirtualMachine.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'Kubernetes': 'Kubernetes', 'SynapseSpark': 'SynapseSpark', 'VirtualMachine': 'VirtualMachine'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Compute, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.compute_location = kwargs.get('compute_location', None)
+ self.provisioning_state = None
+ self.description = kwargs.get('description', None)
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = kwargs.get('resource_id', None)
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+ self.disable_local_auth = kwargs.get('disable_local_auth', None)
+
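+# Illustrative note (not generated code): the _subtype_map on Compute drives
+# polymorphic deserialization, so the computeType discriminator in a payload
+# selects the concrete subclass. A minimal sketch using msrest's
+# Model.deserialize classmethod (the payload below is a placeholder):
+#
+#     compute = Compute.deserialize({"computeType": "AKS", "description": "demo"})
+#     # compute should be an instance of the Aks subclass defined below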
+
+class Aks(Compute):
+ """A Machine Learning compute based on AKS.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: AKS properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AksProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Aks, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class AksComputeSecretsProperties(msrest.serialization.Model):
+ """Properties of AksComputeSecrets.
+
+ :param user_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type user_kube_config: str
+ :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type admin_kube_config: str
+ :param image_pull_secret_name: Image registry pull secret.
+ :type image_pull_secret_name: str
+ """
+
+ _attribute_map = {
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksComputeSecretsProperties, self).__init__(**kwargs)
+ self.user_kube_config = kwargs.get('user_kube_config', None)
+ self.admin_kube_config = kwargs.get('admin_kube_config', None)
+ self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None)
+
+
+class ComputeSecrets(msrest.serialization.Model):
+ """Secrets related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+
+
+class AksComputeSecrets(ComputeSecrets, AksComputeSecretsProperties):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param user_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type user_kube_config: str
+ :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type admin_kube_config: str
+ :param image_pull_secret_name: Image registry pull secret.
+ :type image_pull_secret_name: str
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksComputeSecrets, self).__init__(**kwargs)
+ self.user_kube_config = kwargs.get('user_kube_config', None)
+ self.admin_kube_config = kwargs.get('admin_kube_config', None)
+ self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None)
+        self.compute_type = 'AKS' # type: str
+
+
+class AksNetworkingConfiguration(msrest.serialization.Model):
+ """Advance configuration for AKS networking.
+
+ :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet_id: str
+ :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
+ not overlap with any Subnet IP ranges.
+ :type service_cidr: str
+ :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
+ the Kubernetes service address range specified in serviceCidr.
+ :type dns_service_ip: str
+ :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
+ must not overlap with any Subnet IP ranges or the Kubernetes service address range.
+ :type docker_bridge_cidr: str
+ """
+
+ _validation = {
+ 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
+ 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ }
+
+ _attribute_map = {
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
+ 'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
+ 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
+ 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksNetworkingConfiguration, self).__init__(**kwargs)
+ self.subnet_id = kwargs.get('subnet_id', None)
+ self.service_cidr = kwargs.get('service_cidr', None)
+ self.dns_service_ip = kwargs.get('dns_service_ip', None)
+ self.docker_bridge_cidr = kwargs.get('docker_bridge_cidr', None)
+
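+# Illustrative note (not generated code): a minimal sketch of constructing an
+# AksNetworkingConfiguration; the subnet ID and CIDR/IP values are placeholders
+# chosen to satisfy the regex patterns in _validation above.
+#
+#     aks_networking = AksNetworkingConfiguration(
+#         subnet_id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/"
+#                   "workspace-1234/providers/Microsoft.Network/virtualNetworks/testvnet/subnets/default",
+#         service_cidr="10.0.0.0/16",
+#         dns_service_ip="10.0.0.10",
+#         docker_bridge_cidr="172.17.0.1/16",
+#     )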
+
+class AksProperties(msrest.serialization.Model):
+ """AKS properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param cluster_fqdn: Cluster fully qualified domain name.
+ :type cluster_fqdn: str
+ :ivar system_services: System services.
+ :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
+ :param agent_count: Number of agents.
+ :type agent_count: int
+ :param agent_vm_size: Agent virtual machine size.
+ :type agent_vm_size: str
+ :param cluster_purpose: Intended usage of the cluster. Possible values include: "FastProd",
+ "DenseProd", "DevTest". Default value: "FastProd".
+ :type cluster_purpose: str or ~azure_machine_learning_workspaces.models.ClusterPurpose
+ :param ssl_configuration: SSL configuration.
+ :type ssl_configuration: ~azure_machine_learning_workspaces.models.SslConfiguration
+ :param aks_networking_configuration: AKS networking configuration for vnet.
+ :type aks_networking_configuration:
+ ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration
+ :param load_balancer_type: Load Balancer Type. Possible values include: "PublicIp",
+ "InternalLoadBalancer". Default value: "PublicIp".
+ :type load_balancer_type: str or ~azure_machine_learning_workspaces.models.LoadBalancerType
+ :param load_balancer_subnet: Load Balancer Subnet.
+ :type load_balancer_subnet: str
+ """
+
+ _validation = {
+ 'system_services': {'readonly': True},
+ 'agent_count': {'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
+ 'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
+ 'agent_count': {'key': 'agentCount', 'type': 'int'},
+ 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'},
+ 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
+ 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'},
+ 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'},
+ 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'},
+ 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksProperties, self).__init__(**kwargs)
+ self.cluster_fqdn = kwargs.get('cluster_fqdn', None)
+ self.system_services = None
+ self.agent_count = kwargs.get('agent_count', None)
+ self.agent_vm_size = kwargs.get('agent_vm_size', None)
+ self.cluster_purpose = kwargs.get('cluster_purpose', "FastProd")
+ self.ssl_configuration = kwargs.get('ssl_configuration', None)
+ self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None)
+ self.load_balancer_type = kwargs.get('load_balancer_type', "PublicIp")
+ self.load_balancer_subnet = kwargs.get('load_balancer_subnet', None)
+
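+# Illustrative note (not generated code): cluster_purpose and load_balancer_type
+# default to "FastProd" and "PublicIp", so a sketch only needs to pass what it
+# overrides; the VM size below is a placeholder.
+#
+#     aks_properties = AksProperties(
+#         agent_count=3,
+#         agent_vm_size="Standard_D3_v2",
+#         cluster_purpose="DevTest",
+#         aks_networking_configuration=aks_networking,  # from the sketch above
+#     )
+#     aks_compute = Aks(description="AKS compute for testing", properties=aks_properties)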
+
+class AmlCompute(Compute):
+ """Properties(top level) of AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside (true), or provisioned by the Machine Learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties: Properties of AmlCompute.
+ :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlCompute, self).__init__(**kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class AmlComputeNodeInformation(msrest.serialization.Model):
+ """Compute node information related to a AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar node_id: ID of the compute node.
+ :vartype node_id: str
+ :ivar private_ip_address: Private IP address of the compute node.
+ :vartype private_ip_address: str
+ :ivar public_ip_address: Public IP address of the compute node.
+ :vartype public_ip_address: str
+ :ivar port: SSH port number of the node.
+ :vartype port: int
+ :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable,
+ leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable",
+ "leaving", "preempted".
+ :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState
+ :ivar run_id: ID of the experiment running on the node, if any; otherwise null.
+ :vartype run_id: str
+ """
+
+ _validation = {
+ 'node_id': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'port': {'readonly': True},
+ 'node_state': {'readonly': True},
+ 'run_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'node_id': {'key': 'nodeId', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'port': {'key': 'port', 'type': 'int'},
+ 'node_state': {'key': 'nodeState', 'type': 'str'},
+ 'run_id': {'key': 'runId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodeInformation, self).__init__(**kwargs)
+ self.node_id = None
+ self.private_ip_address = None
+ self.public_ip_address = None
+ self.port = None
+ self.node_state = None
+ self.run_id = None
+
+
+class AmlComputeNodesInformation(msrest.serialization.Model):
+ """Result of AmlCompute Nodes.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar nodes: The collection of returned AmlCompute nodes details.
+ :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation]
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'nodes': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodesInformation, self).__init__(**kwargs)
+ self.nodes = None
+ self.next_link = None
+
+
+class AmlComputeProperties(msrest.serialization.Model):
+ """AML Compute properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value:
+ "Linux".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.OsType
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated",
+ "LowPriority".
+ :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority
+ :param virtual_machine_image: Virtual Machine image for AML Compute - windows only.
+ :type virtual_machine_image: ~azure_machine_learning_workspaces.models.VirtualMachineImage
+ :param isolated_network: Whether the compute is placed in an isolated network.
+ :type isolated_network: bool
+ :param scale_settings: Scale settings for AML Compute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ :param user_account_credentials: Credentials for an administrator user account that will be
+ created on each compute node.
+ :type user_account_credentials:
+ ~azure_machine_learning_workspaces.models.UserAccountCredentials
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param remote_login_port_public_access: State of the public SSH port. Possible values are:
+ Disabled - Indicates that the public SSH port is closed on all nodes of the cluster. Enabled -
+ Indicates that the public SSH port is open on all nodes of the cluster. NotSpecified -
+ Indicates that the public SSH port is closed on all nodes of the cluster if a VNet is defined,
+ else it is open on all public nodes. NotSpecified is allowed only at cluster creation time;
+ after creation the port is either enabled or disabled. Possible values include: "Enabled",
+ "Disabled", "NotSpecified". Default value: "NotSpecified".
+ :type remote_login_port_public_access: str or
+ ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess
+ :ivar allocation_state: Allocation state of the compute. Possible values are: steady -
+ Indicates that the compute is not resizing. There are no changes to the number of compute nodes
+ in the compute in progress. A compute enters this state when it is created and when no
+ operations are being performed on the compute to change the number of compute nodes. resizing -
+ Indicates that the compute is resizing; that is, compute nodes are being added to or removed
+ from the compute. Possible values include: "Steady", "Resizing".
+ :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState
+ :ivar allocation_state_transition_time: The time at which the compute entered its current
+ allocation state.
+ :vartype allocation_state_transition_time: ~datetime.datetime
+ :ivar errors: Collection of errors encountered by various compute nodes during node setup.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar current_node_count: The number of compute nodes currently assigned to the compute.
+ :vartype current_node_count: int
+ :ivar target_node_count: The target number of compute nodes for the compute. If the
+ allocationState is resizing, this property denotes the target node count for the ongoing resize
+ operation. If the allocationState is steady, this property denotes the target node count for
+ the previous resize operation.
+ :vartype target_node_count: int
+ :ivar node_state_counts: Counts of various node states on the compute.
+ :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts
+ :param enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
+ values are: true - Indicates that the compute nodes will have public IPs provisioned. false -
+ Indicates that the compute nodes will have a private endpoint and no public IPs.
+ :type enable_node_public_ip: bool
+ """
+
+ _validation = {
+ 'allocation_state': {'readonly': True},
+ 'allocation_state_transition_time': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'current_node_count': {'readonly': True},
+ 'target_node_count': {'readonly': True},
+ 'node_state_counts': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'vm_priority': {'key': 'vmPriority', 'type': 'str'},
+ 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'},
+ 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+ 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'},
+ 'allocation_state': {'key': 'allocationState', 'type': 'str'},
+ 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
+ 'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
+ 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
+ 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
+ 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'},
+ 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeProperties, self).__init__(**kwargs)
+ self.os_type = kwargs.get('os_type', "Linux")
+ self.vm_size = kwargs.get('vm_size', None)
+ self.vm_priority = kwargs.get('vm_priority', None)
+ self.virtual_machine_image = kwargs.get('virtual_machine_image', None)
+ self.isolated_network = kwargs.get('isolated_network', None)
+ self.scale_settings = kwargs.get('scale_settings', None)
+ self.user_account_credentials = kwargs.get('user_account_credentials', None)
+ self.subnet = kwargs.get('subnet', None)
+ self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified")
+ self.allocation_state = None
+ self.allocation_state_transition_time = None
+ self.errors = None
+ self.current_node_count = None
+ self.target_node_count = None
+ self.node_state_counts = None
+ self.enable_node_public_ip = kwargs.get('enable_node_public_ip', True)
+
+
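+def _example_aml_compute():
+    # Illustrative sketch (not generated code): wraps AmlComputeProperties in an
+    # AmlCompute payload. Only keyword arguments documented in the docstrings above
+    # are used; the VM size and description are hypothetical.
+    props = AmlComputeProperties(
+        vm_size="STANDARD_DS3_V2",
+        vm_priority="Dedicated",
+        remote_login_port_public_access="Disabled",
+        enable_node_public_ip=False,
+    )
+    return AmlCompute(description="training cluster", properties=props)
+
+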
+class AmlUserFeature(msrest.serialization.Model):
+ """Features enabled for a workspace.
+
+ :param id: Specifies the feature ID.
+ :type id: str
+ :param display_name: Specifies the feature name.
+ :type display_name: str
+ :param description: Describes the feature for user experience.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlUserFeature, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.display_name = kwargs.get('display_name', None)
+ self.description = kwargs.get('description', None)
+
+
+class AssignedUser(msrest.serialization.Model):
+ """A user that can be assigned to a compute instance.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param object_id: Required. User’s AAD Object Id.
+ :type object_id: str
+ :param tenant_id: Required. User’s AAD Tenant Id.
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'object_id': {'required': True},
+ 'tenant_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'object_id': {'key': 'objectId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AssignedUser, self).__init__(**kwargs)
+ self.object_id = kwargs['object_id']
+ self.tenant_id = kwargs['tenant_id']
+
+
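+def _example_assigned_user():
+    # Illustrative sketch (not generated code): both object_id and tenant_id are
+    # required, so the constructor raises KeyError if either keyword is omitted.
+    # The GUIDs below are placeholders.
+    return AssignedUser(
+        object_id="00000000-0000-0000-0000-000000000001",
+        tenant_id="00000000-0000-0000-0000-000000000002",
+    )
+
+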
+class AutoPauseProperties(msrest.serialization.Model):
+ """Auto pause properties.
+
+ :param delay_in_minutes:
+ :type delay_in_minutes: int
+ :param enabled:
+ :type enabled: bool
+ """
+
+ _attribute_map = {
+ 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AutoPauseProperties, self).__init__(**kwargs)
+ self.delay_in_minutes = kwargs.get('delay_in_minutes', None)
+ self.enabled = kwargs.get('enabled', None)
+
+
+class AutoScaleProperties(msrest.serialization.Model):
+ """Auto scale properties.
+
+ :param min_node_count:
+ :type min_node_count: int
+ :param enabled:
+ :type enabled: bool
+ :param max_node_count:
+ :type max_node_count: int
+ """
+
+ _attribute_map = {
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AutoScaleProperties, self).__init__(**kwargs)
+ self.min_node_count = kwargs.get('min_node_count', None)
+ self.enabled = kwargs.get('enabled', None)
+ self.max_node_count = kwargs.get('max_node_count', None)
+
+
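+def _example_auto_scale_and_pause():
+    # Illustrative sketch (not generated code): a typical pairing of auto-scale and
+    # auto-pause settings. Node counts and the pause delay are hypothetical values.
+    return (
+        AutoScaleProperties(enabled=True, min_node_count=1, max_node_count=4),
+        AutoPauseProperties(enabled=True, delay_in_minutes=15),
+    )
+
+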
+class ClusterUpdateParameters(msrest.serialization.Model):
+ """AmlCompute update parameters.
+
+ :param properties: Properties of ClusterUpdate.
+ :type properties: ~azure_machine_learning_workspaces.models.ScaleSettingsInformation
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties.properties', 'type': 'ScaleSettingsInformation'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ClusterUpdateParameters, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+class Components1D3SwueSchemasComputeresourceAllof1(msrest.serialization.Model):
+ """Components1D3SwueSchemasComputeresourceAllof1.
+
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Components1D3SwueSchemasComputeresourceAllof1, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+class ComputeInstance(Compute):
+ """Properties(top level) of ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside (true), or provisioned by the Machine Learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties: Properties of ComputeInstance.
+ :type properties: ~azure_machine_learning_workspaces.models.ComputeInstanceProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstance, self).__init__(**kwargs)
+ self.compute_type = 'ComputeInstance' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+ :param endpoint_uri: Application's endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = kwargs.get('display_name', None)
+ self.endpoint_uri = kwargs.get('endpoint_uri', None)
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+ """Defines all connectivity endpoints and properties for an ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+ which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+ """Describes information on user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_name: Name of the user.
+ :vartype user_name: str
+ :ivar user_org_id: Uniquely identifies the user's Azure Active Directory organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceLastOperation(msrest.serialization.Model):
+ """The last operation on ComputeInstance.
+
+ :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
+ "Stop", "Restart", "Reimage", "Delete".
+ :type operation_name: str or ~azure_machine_learning_workspaces.models.OperationName
+ :param operation_time: Time of the last operation.
+ :type operation_time: ~datetime.datetime
+ :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
+ "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
+ :type operation_status: str or ~azure_machine_learning_workspaces.models.OperationStatus
+ """
+
+ _attribute_map = {
+ 'operation_name': {'key': 'operationName', 'type': 'str'},
+ 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+ 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+ self.operation_name = kwargs.get('operation_name', None)
+ self.operation_time = kwargs.get('operation_time', None)
+ self.operation_status = kwargs.get('operation_status', None)
+
+
+class ComputeInstanceProperties(msrest.serialization.Model):
+ """Compute Instance properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param application_sharing_policy: Policy for sharing applications on this compute instance
+ among users of the parent workspace. If Personal, only the creator can access applications on
+ this compute instance. When Shared, any workspace user can access applications on this
+ instance depending on their assigned role. Possible values include: "Personal", "Shared".
+ Default value: "Shared".
+ :type application_sharing_policy: str or
+ ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy
+ :param ssh_settings: Specifies policy and settings for SSH access.
+ :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings
+ :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+ ComputeInstance.
+ :vartype connectivity_endpoints:
+ ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints
+ :ivar applications: Describes available applications and their endpoints on this
+ ComputeInstance.
+ :vartype applications:
+ list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication]
+ :ivar created_by: Describes information on the user who created this ComputeInstance.
+ :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy
+ :ivar errors: Collection of errors encountered on this ComputeInstance.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar state: The current state of this ComputeInstance. Possible values include: "Creating",
+ "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+ "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState
+ :param compute_instance_authorization_type: The Compute Instance Authorization type. Available
+ values are personal (default). Possible values include: "personal". Default value: "personal".
+ :type compute_instance_authorization_type: str or
+ ~azure_machine_learning_workspaces.models.ComputeInstanceAuthorizationType
+ :param personal_compute_instance_settings: Settings for a personal compute instance.
+ :type personal_compute_instance_settings:
+ ~azure_machine_learning_workspaces.models.PersonalComputeInstanceSettings
+ :param setup_scripts: Details of customized scripts to execute for setting up the cluster.
+ :type setup_scripts: ~azure_machine_learning_workspaces.models.SetupScripts
+ :ivar last_operation: The last operation on ComputeInstance.
+ :vartype last_operation: ~azure_machine_learning_workspaces.models.ComputeInstanceLastOperation
+ """
+
+ _validation = {
+ 'connectivity_endpoints': {'readonly': True},
+ 'applications': {'readonly': True},
+ 'created_by': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'state': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+ 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+ 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+ 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+ 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+ 'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+ 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+ 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'},
+ 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceProperties, self).__init__(**kwargs)
+ self.vm_size = kwargs.get('vm_size', None)
+ self.subnet = kwargs.get('subnet', None)
+ self.application_sharing_policy = kwargs.get('application_sharing_policy', "Shared")
+ self.ssh_settings = kwargs.get('ssh_settings', None)
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.compute_instance_authorization_type = kwargs.get('compute_instance_authorization_type', "personal")
+ self.personal_compute_instance_settings = kwargs.get('personal_compute_instance_settings', None)
+ self.setup_scripts = kwargs.get('setup_scripts', None)
+ self.last_operation = None
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable.
+ Possible values include: "Enabled", "Disabled". Default value: "Disabled".
+ :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess
+ :ivar admin_user_name: Describes the admin user name.
+ :vartype admin_user_name: str
+ :ivar ssh_port: Describes the port for connecting through SSH.
+ :vartype ssh_port: int
+ :param admin_public_key: Specifies the SSH RSA public key as a string. Use "ssh-keygen -t
+ rsa -b 2048" to generate your SSH key pairs.
+ :type admin_public_key: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'readonly': True},
+ 'ssh_port': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'},
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceSshSettings, self).__init__(**kwargs)
+ self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled")
+ self.admin_user_name = None
+ self.ssh_port = None
+ self.admin_public_key = kwargs.get('admin_public_key', None)
+
+
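+def _example_compute_instance():
+    # Illustrative sketch (not generated code): a ComputeInstance payload with SSH
+    # enabled. The VM size and public key are placeholders; admin_user_name and
+    # ssh_port are read-only and therefore not set here.
+    ssh = ComputeInstanceSshSettings(
+        ssh_public_access="Enabled",
+        admin_public_key="ssh-rsa AAAAB3Nza... user@example",
+    )
+    props = ComputeInstanceProperties(
+        vm_size="STANDARD_DS3_V2",
+        application_sharing_policy="Personal",
+        ssh_settings=ssh,
+    )
+    return ComputeInstance(properties=props)
+
+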
+class Resource(msrest.serialization.Model):
+ """Common fields that are returned in the response for all Azure Resource Manager resources.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Resource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+
+
+class ComputeResource(Resource, Components1D3SwueSchemasComputeresourceAllof1):
+ """Machine Learning compute object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: System data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeResource, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+ self.identity = kwargs.get('identity', None)
+ self.location = kwargs.get('location', None)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.system_data = None
+ self.id = None
+ self.name = None
+ self.type = None
+
+
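+def _example_compute_resource():
+    # Illustrative sketch (not generated code): the ARM envelope that the compute
+    # payloads above are sent in. The location, tags, and VM size are hypothetical.
+    return ComputeResource(
+        location="eastus",
+        tags={"team": "ml-platform"},
+        properties=AmlCompute(
+            properties=AmlComputeProperties(vm_size="STANDARD_DS3_V2"),
+        ),
+    )
+
+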
+class ContainerResourceRequirements(msrest.serialization.Model):
+ """The resource requirements for the container (cpu and memory).
+
+ :param cpu: The minimum amount of CPU cores to be used by the container. More info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type cpu: float
+ :param cpu_limit: The maximum amount of CPU cores allowed to be used by the container. More
+ info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type cpu_limit: float
+ :param memory_in_gb: The minimum amount of memory (in GB) to be used by the container. More
+ info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type memory_in_gb: float
+ :param memory_in_gb_limit: The maximum amount of memory (in GB) allowed to be used by the
+ container. More info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type memory_in_gb_limit: float
+ :param gpu: The number of GPU cores in the container.
+ :type gpu: int
+ :param fpga: The number of FPGA PCIe devices exposed to the container. Must be a multiple of 2.
+ :type fpga: int
+ """
+
+ _attribute_map = {
+ 'cpu': {'key': 'cpu', 'type': 'float'},
+ 'cpu_limit': {'key': 'cpuLimit', 'type': 'float'},
+ 'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
+ 'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'},
+ 'gpu': {'key': 'gpu', 'type': 'int'},
+ 'fpga': {'key': 'fpga', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ContainerResourceRequirements, self).__init__(**kwargs)
+ self.cpu = kwargs.get('cpu', None)
+ self.cpu_limit = kwargs.get('cpu_limit', None)
+ self.memory_in_gb = kwargs.get('memory_in_gb', None)
+ self.memory_in_gb_limit = kwargs.get('memory_in_gb_limit', None)
+ self.gpu = kwargs.get('gpu', None)
+ self.fpga = kwargs.get('fpga', None)
+
+
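+def _example_container_resource_requirements():
+    # Illustrative sketch (not generated code): request/limit pairs for CPU and
+    # memory, following the Kubernetes convention linked in the docstring above.
+    # The numeric values are hypothetical.
+    return ContainerResourceRequirements(
+        cpu=0.5,
+        cpu_limit=1.0,
+        memory_in_gb=1.0,
+        memory_in_gb_limit=2.0,
+    )
+
+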
+class CosmosDbSettings(msrest.serialization.Model):
+ """CosmosDbSettings.
+
+ :param collections_throughput: The throughput of the collections in the Cosmos DB database.
+ :type collections_throughput: int
+ """
+
+ _attribute_map = {
+ 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CosmosDbSettings, self).__init__(**kwargs)
+ self.collections_throughput = kwargs.get('collections_throughput', None)
+
+
+class Databricks(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside (true), or provisioned by the Machine Learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties: Properties of Databricks.
+ :type properties: ~azure_machine_learning_workspaces.models.DatabricksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DatabricksProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Databricks, self).__init__(**kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class DatabricksComputeSecretsProperties(msrest.serialization.Model):
+ """Properties of Databricks Compute Secrets.
+
+ :param databricks_access_token: Access token for the Databricks account.
+ :type databricks_access_token: str
+ """
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksComputeSecretsProperties, self).__init__(**kwargs)
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+
+
+class DatabricksComputeSecrets(ComputeSecrets, DatabricksComputeSecretsProperties):
+ """Secrets related to a Machine Learning compute based on Databricks.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param databricks_access_token: Access token for the Databricks account.
+ :type databricks_access_token: str
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksComputeSecrets, self).__init__(**kwargs)
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+ self.compute_type = 'Databricks' # type: str
+
+
+class DatabricksProperties(msrest.serialization.Model):
+ """Properties of Databricks.
+
+ :param databricks_access_token: Databricks access token.
+ :type databricks_access_token: str
+ :param workspace_url: Workspace URL.
+ :type workspace_url: str
+ """
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksProperties, self).__init__(**kwargs)
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+ self.workspace_url = kwargs.get('workspace_url', None)
+
+
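+def _example_databricks_compute():
+    # Illustrative sketch (not generated code): attaching an existing Databricks
+    # workspace. The resource ID, access token, and workspace URL are placeholders.
+    return Databricks(
+        resource_id="/subscriptions/.../providers/Microsoft.Databricks/workspaces/mydbx",
+        properties=DatabricksProperties(
+            databricks_access_token="dapi-placeholder-token",
+            workspace_url="https://adb-000.azuredatabricks.net",
+        ),
+    )
+
+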
+class DataFactory(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside (true), or provisioned by the Machine Learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataFactory, self).__init__(**kwargs)
+ self.compute_type = 'DataFactory' # type: str
+
+
+class DataLakeAnalytics(Compute):
+ """A DataLakeAnalytics compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside (true), or provisioned by the Machine Learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DataLakeAnalyticsProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataLakeAnalytics, self).__init__(**kwargs)
+ self.compute_type = 'DataLakeAnalytics' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class DataLakeAnalyticsProperties(msrest.serialization.Model):
+ """DataLakeAnalyticsProperties.
+
+ :param data_lake_store_account_name: DataLake Store Account Name.
+ :type data_lake_store_account_name: str
+ """
+
+ _attribute_map = {
+ 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataLakeAnalyticsProperties, self).__init__(**kwargs)
+ self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None)
+
+
+class DiagnoseRequestProperties(msrest.serialization.Model):
+ """DiagnoseRequestProperties.
+
+ :param udr: Setting for diagnosing user defined routing.
+ :type udr: dict[str, object]
+ :param nsg: Setting for diagnosing network security group.
+ :type nsg: dict[str, object]
+ :param resource_lock: Setting for diagnosing resource lock.
+ :type resource_lock: dict[str, object]
+ :param dns_resolution: Setting for diagnosing DNS resolution.
+ :type dns_resolution: dict[str, object]
+ :param storage_account: Setting for diagnosing dependent storage account.
+ :type storage_account: dict[str, object]
+ :param key_vault: Setting for diagnosing dependent key vault.
+ :type key_vault: dict[str, object]
+ :param container_registry: Setting for diagnosing dependent container registry.
+ :type container_registry: dict[str, object]
+ :param application_insights: Setting for diagnosing dependent application insights.
+ :type application_insights: dict[str, object]
+ :param others: Setting for diagnosing unclassified category of problems.
+ :type others: dict[str, object]
+ """
+
+ _attribute_map = {
+ 'udr': {'key': 'udr', 'type': '{object}'},
+ 'nsg': {'key': 'nsg', 'type': '{object}'},
+ 'resource_lock': {'key': 'resourceLock', 'type': '{object}'},
+ 'dns_resolution': {'key': 'dnsResolution', 'type': '{object}'},
+ 'storage_account': {'key': 'storageAccount', 'type': '{object}'},
+ 'key_vault': {'key': 'keyVault', 'type': '{object}'},
+ 'container_registry': {'key': 'containerRegistry', 'type': '{object}'},
+ 'application_insights': {'key': 'applicationInsights', 'type': '{object}'},
+ 'others': {'key': 'others', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DiagnoseRequestProperties, self).__init__(**kwargs)
+ self.udr = kwargs.get('udr', None)
+ self.nsg = kwargs.get('nsg', None)
+ self.resource_lock = kwargs.get('resource_lock', None)
+ self.dns_resolution = kwargs.get('dns_resolution', None)
+ self.storage_account = kwargs.get('storage_account', None)
+ self.key_vault = kwargs.get('key_vault', None)
+ self.container_registry = kwargs.get('container_registry', None)
+ self.application_insights = kwargs.get('application_insights', None)
+ self.others = kwargs.get('others', None)
+
+
+class DiagnoseResponseResult(msrest.serialization.Model):
+ """DiagnoseResponseResult.
+
+ :param value:
+ :type value: ~azure_machine_learning_workspaces.models.DiagnoseResponseResultValue
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'DiagnoseResponseResultValue'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DiagnoseResponseResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class DiagnoseResponseResultValue(msrest.serialization.Model):
+ """DiagnoseResponseResultValue.
+
+ :param user_defined_route_results:
+ :type user_defined_route_results:
+ list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param network_security_rule_results:
+ :type network_security_rule_results:
+ list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param resource_lock_results:
+ :type resource_lock_results: list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param dns_resolution_results:
+ :type dns_resolution_results: list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param storage_account_results:
+ :type storage_account_results: list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param key_vault_results:
+ :type key_vault_results: list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param container_registry_results:
+ :type container_registry_results:
+ list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param application_insights_results:
+ :type application_insights_results:
+ list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param other_results:
+ :type other_results: list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ """
+
+ _attribute_map = {
+ 'user_defined_route_results': {'key': 'userDefinedRouteResults', 'type': '[DiagnoseResult]'},
+ 'network_security_rule_results': {'key': 'networkSecurityRuleResults', 'type': '[DiagnoseResult]'},
+ 'resource_lock_results': {'key': 'resourceLockResults', 'type': '[DiagnoseResult]'},
+ 'dns_resolution_results': {'key': 'dnsResolutionResults', 'type': '[DiagnoseResult]'},
+ 'storage_account_results': {'key': 'storageAccountResults', 'type': '[DiagnoseResult]'},
+ 'key_vault_results': {'key': 'keyVaultResults', 'type': '[DiagnoseResult]'},
+ 'container_registry_results': {'key': 'containerRegistryResults', 'type': '[DiagnoseResult]'},
+ 'application_insights_results': {'key': 'applicationInsightsResults', 'type': '[DiagnoseResult]'},
+ 'other_results': {'key': 'otherResults', 'type': '[DiagnoseResult]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DiagnoseResponseResultValue, self).__init__(**kwargs)
+ self.user_defined_route_results = kwargs.get('user_defined_route_results', None)
+ self.network_security_rule_results = kwargs.get('network_security_rule_results', None)
+ self.resource_lock_results = kwargs.get('resource_lock_results', None)
+ self.dns_resolution_results = kwargs.get('dns_resolution_results', None)
+ self.storage_account_results = kwargs.get('storage_account_results', None)
+ self.key_vault_results = kwargs.get('key_vault_results', None)
+ self.container_registry_results = kwargs.get('container_registry_results', None)
+ self.application_insights_results = kwargs.get('application_insights_results', None)
+ self.other_results = kwargs.get('other_results', None)
+
+
+class DiagnoseResult(msrest.serialization.Model):
+ """Result of Diagnose.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: Code for workspace setup error.
+ :vartype code: str
+ :ivar level: Level of workspace setup error. Possible values include: "Warning", "Error",
+ "Information".
+ :vartype level: str or ~azure_machine_learning_workspaces.models.DiagnoseResultLevel
+ :ivar message: Message of workspace setup error.
+ :vartype message: str
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'level': {'readonly': True},
+ 'message': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DiagnoseResult, self).__init__(**kwargs)
+ self.code = None
+ self.level = None
+ self.message = None
+
+
+class DiagnoseWorkspaceParameters(msrest.serialization.Model):
+ """Parameters to diagnose a workspace.
+
+ :param value: Value of Parameters.
+ :type value: ~azure_machine_learning_workspaces.models.DiagnoseRequestProperties
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'DiagnoseRequestProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DiagnoseWorkspaceParameters, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
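+def _example_diagnose_workspace_parameters():
+    # Illustrative sketch (not generated code): each diagnostic category accepts a
+    # free-form dict; empty dicts are passed here purely as placeholders.
+    return DiagnoseWorkspaceParameters(
+        value=DiagnoseRequestProperties(
+            nsg={},
+            dns_resolution={},
+            storage_account={},
+            key_vault={},
+        )
+    )
+
+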
+class EncryptionProperty(msrest.serialization.Model):
+ """EncryptionProperty.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param status: Required. Indicates whether or not the encryption is enabled for the workspace.
+ Possible values include: "Enabled", "Disabled".
+ :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
+ :param identity: The identity that will be used to access the key vault for encryption at rest.
+ :type identity: ~azure_machine_learning_workspaces.models.IdentityForCmk
+ :param key_vault_properties: Required. Customer Key vault properties.
+ :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties
+ """
+
+ _validation = {
+ 'status': {'required': True},
+ 'key_vault_properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'IdentityForCmk'},
+ 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EncryptionProperty, self).__init__(**kwargs)
+ self.status = kwargs['status']
+ self.identity = kwargs.get('identity', None)
+ self.key_vault_properties = kwargs['key_vault_properties']
+
+
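+def _example_encryption_property():
+    # Illustrative sketch (not generated code): customer-managed key encryption.
+    # status and key_vault_properties are required. The KeyVaultProperties keyword
+    # names (key_vault_arm_id, key_identifier) are assumed from that model, which is
+    # defined elsewhere in this module, and the IDs below are placeholders.
+    return EncryptionProperty(
+        status="Enabled",
+        key_vault_properties=KeyVaultProperties(
+            key_vault_arm_id="/subscriptions/.../providers/Microsoft.KeyVault/vaults/testkv",
+            key_identifier="https://testkv.vault.azure.net/keys/testkey/0000",
+        ),
+    )
+
+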
+class ErrorAdditionalInfo(msrest.serialization.Model):
+ """The resource management error additional info.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar type: The additional info type.
+ :vartype type: str
+ :ivar info: The additional info.
+ :vartype info: object
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'info': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'info': {'key': 'info', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorAdditionalInfo, self).__init__(**kwargs)
+ self.type = None
+ self.info = None
+
+
+class ErrorDetail(msrest.serialization.Model):
+ """The error detail.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: The error code.
+ :vartype code: str
+ :ivar message: The error message.
+ :vartype message: str
+ :ivar target: The error target.
+ :vartype target: str
+ :ivar details: The error details.
+ :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+ :ivar additional_info: The error additional info.
+ :vartype additional_info: list[~azure_machine_learning_workspaces.models.ErrorAdditionalInfo]
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ 'target': {'readonly': True},
+ 'details': {'readonly': True},
+ 'additional_info': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'target': {'key': 'target', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[ErrorDetail]'},
+ 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorDetail, self).__init__(**kwargs)
+ self.code = None
+ self.message = None
+ self.target = None
+ self.details = None
+ self.additional_info = None
+
+
+class ErrorResponse(msrest.serialization.Model):
+ """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.).
+
+ :param error: The error object.
+ :type error: ~azure_machine_learning_workspaces.models.ErrorDetail
+ """
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorDetail'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorResponse, self).__init__(**kwargs)
+ self.error = kwargs.get('error', None)
+
+
+class EstimatedVmPrice(msrest.serialization.Model):
+ """The estimated price info for using a VM of a particular OS type, tier, etc.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param retail_price: Required. The price charged for using the VM.
+ :type retail_price: float
+ :param os_type: Required. Operating system type used by the VM. Possible values include:
+ "Linux", "Windows".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.VmPriceOsType
+ :param vm_tier: Required. The type of the VM. Possible values include: "Standard",
+ "LowPriority", "Spot".
+ :type vm_tier: str or ~azure_machine_learning_workspaces.models.VmTier
+ """
+
+ _validation = {
+ 'retail_price': {'required': True},
+ 'os_type': {'required': True},
+ 'vm_tier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'retail_price': {'key': 'retailPrice', 'type': 'float'},
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_tier': {'key': 'vmTier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EstimatedVmPrice, self).__init__(**kwargs)
+ self.retail_price = kwargs['retail_price']
+ self.os_type = kwargs['os_type']
+ self.vm_tier = kwargs['vm_tier']
+
+
+class EstimatedVmPrices(msrest.serialization.Model):
+ """The estimated price info for using a VM.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param billing_currency: Required. Three-letter code specifying the currency of the VM price.
+ Example: USD. Possible values include: "USD".
+ :type billing_currency: str or ~azure_machine_learning_workspaces.models.BillingCurrency
+ :param unit_of_measure: Required. The unit of time measurement for the specified VM price.
+ Example: OneHour. Possible values include: "OneHour".
+ :type unit_of_measure: str or ~azure_machine_learning_workspaces.models.UnitOfMeasure
+ :param values: Required. The list of estimated prices for using a VM of a particular OS type,
+ tier, etc.
+ :type values: list[~azure_machine_learning_workspaces.models.EstimatedVmPrice]
+ """
+
+ _validation = {
+ 'billing_currency': {'required': True},
+ 'unit_of_measure': {'required': True},
+ 'values': {'required': True},
+ }
+
+ _attribute_map = {
+ 'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
+ 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[EstimatedVmPrice]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EstimatedVmPrices, self).__init__(**kwargs)
+ self.billing_currency = kwargs['billing_currency']
+ self.unit_of_measure = kwargs['unit_of_measure']
+ self.values = kwargs['values']
+
+
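+# Illustrative sketch only, not part of the generated models: EstimatedVmPrices
+# is normally returned by the service, but constructing it locally shows how the
+# required fields map to the camelCase wire format via msrest's Model.serialize().
+# The price value below is a hypothetical placeholder.
+def _example_estimated_vm_prices():
+    prices = EstimatedVmPrices(
+        billing_currency='USD',
+        unit_of_measure='OneHour',
+        values=[EstimatedVmPrice(retail_price=0.096, os_type='Linux', vm_tier='Standard')],
+    )
+    # Produces keys such as 'billingCurrency', 'unitOfMeasure' and 'retailPrice'.
+    return prices.serialize()
+
+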
+class ExternalFqdnResponse(msrest.serialization.Model):
+ """ExternalFqdnResponse.
+
+ :param value:
+ :type value: list[~azure_machine_learning_workspaces.models.FqdnEndpoints]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[FqdnEndpoints]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ExternalFqdnResponse, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class FqdnEndpoint(msrest.serialization.Model):
+ """FqdnEndpoint.
+
+ :param domain_name:
+ :type domain_name: str
+ :param endpoint_details:
+ :type endpoint_details: list[~azure_machine_learning_workspaces.models.FqdnEndpointDetail]
+ """
+
+ _attribute_map = {
+ 'domain_name': {'key': 'domainName', 'type': 'str'},
+ 'endpoint_details': {'key': 'endpointDetails', 'type': '[FqdnEndpointDetail]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(FqdnEndpoint, self).__init__(**kwargs)
+ self.domain_name = kwargs.get('domain_name', None)
+ self.endpoint_details = kwargs.get('endpoint_details', None)
+
+
+class FqdnEndpointDetail(msrest.serialization.Model):
+ """FqdnEndpointDetail.
+
+ :param port:
+ :type port: int
+ """
+
+ _attribute_map = {
+ 'port': {'key': 'port', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(FqdnEndpointDetail, self).__init__(**kwargs)
+ self.port = kwargs.get('port', None)
+
+
+class FqdnEndpoints(msrest.serialization.Model):
+ """FqdnEndpoints.
+
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.FqdnEndpointsProperties
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'FqdnEndpointsProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(FqdnEndpoints, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+class FqdnEndpointsProperties(msrest.serialization.Model):
+ """FqdnEndpointsProperties.
+
+ :param category:
+ :type category: str
+ :param endpoints:
+ :type endpoints: list[~azure_machine_learning_workspaces.models.FqdnEndpoint]
+ """
+
+ _attribute_map = {
+ 'category': {'key': 'category', 'type': 'str'},
+ 'endpoints': {'key': 'endpoints', 'type': '[FqdnEndpoint]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(FqdnEndpointsProperties, self).__init__(**kwargs)
+ self.category = kwargs.get('category', None)
+ self.endpoints = kwargs.get('endpoints', None)
+
+
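+# Illustrative sketch only, not part of the generated models: the FQDN endpoint
+# models nest as ExternalFqdnResponse -> FqdnEndpoints -> FqdnEndpointsProperties
+# -> FqdnEndpoint -> FqdnEndpointDetail. The category and domain values below are
+# hypothetical placeholders.
+def _example_external_fqdn_response():
+    detail = FqdnEndpointDetail(port=443)
+    endpoint = FqdnEndpoint(domain_name='example.notebooks.azure.net', endpoint_details=[detail])
+    properties = FqdnEndpointsProperties(category='Azure Machine Learning', endpoints=[endpoint])
+    return ExternalFqdnResponse(value=[FqdnEndpoints(properties=properties)])
+
+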
+class HdInsight(Compute):
+ """A HDInsight compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provisioning state of the cluster. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and brought
+ from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties: HDInsight compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.HdInsightProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'HdInsightProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HdInsight, self).__init__(**kwargs)
+ self.compute_type = 'HDInsight' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class HdInsightProperties(msrest.serialization.Model):
+ """HDInsight compute properties.
+
+ :param ssh_port: Port open for ssh connections on the master node of the cluster.
+ :type ssh_port: int
+ :param address: Public IP address of the master node of the cluster.
+ :type address: str
+ :param administrator_account: Admin credentials for master node of the cluster.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _attribute_map = {
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HdInsightProperties, self).__init__(**kwargs)
+ self.ssh_port = kwargs.get('ssh_port', None)
+ self.address = kwargs.get('address', None)
+ self.administrator_account = kwargs.get('administrator_account', None)
+
+
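+# Illustrative sketch only, not part of the generated models: attaching an
+# existing HDInsight cluster as compute. The resource id, address and credential
+# values are hypothetical placeholders, and VirtualMachineSshCredentials (defined
+# elsewhere in this module) is assumed to accept username/password keyword arguments.
+def _example_hdinsight_compute():
+    return HdInsight(
+        description='Attached HDInsight cluster',
+        resource_id='/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-rg/providers/Microsoft.HDInsight/clusters/examplecluster',
+        properties=HdInsightProperties(
+            ssh_port=22,
+            address='10.0.0.4',
+            administrator_account=VirtualMachineSshCredentials(username='azureuser', password='<placeholder>'),
+        ),
+    )
+
+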
+class Identity(msrest.serialization.Model):
+ """Identity for the resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of resource identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of resource.
+ :vartype tenant_id: str
+ :param type: The identity type. Possible values include: "SystemAssigned",
+ "SystemAssigned,UserAssigned", "UserAssigned", "None".
+ :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
+ :param user_assigned_identities: The user assigned identities associated with the resource.
+ :type user_assigned_identities: dict[str,
+ ~azure_machine_learning_workspaces.models.UserAssignedIdentity]
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Identity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.type = kwargs.get('type', None)
+ self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
+
+
+class IdentityForCmk(msrest.serialization.Model):
+ """Identity that will be used to access key vault for encryption at rest.
+
+ :param user_assigned_identity: The ArmId of the user assigned identity that will be used to
+ access the customer managed key vault.
+ :type user_assigned_identity: str
+ """
+
+ _attribute_map = {
+ 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(IdentityForCmk, self).__init__(**kwargs)
+ self.user_assigned_identity = kwargs.get('user_assigned_identity', None)
+
+
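+# Illustrative sketch only, not part of the generated models: a workspace
+# Identity combining system- and user-assigned identities, plus the IdentityForCmk
+# pointer used for customer-managed-key encryption. The identity resource id is a
+# hypothetical placeholder, and UserAssignedIdentity (defined elsewhere in this
+# module) is assumed to carry only server-populated fields.
+def _example_workspace_identity():
+    uai_id = ('/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example-rg'
+              '/providers/Microsoft.ManagedIdentity/userAssignedIdentities/example-uai')
+    identity = Identity(
+        type='SystemAssigned,UserAssigned',
+        user_assigned_identities={uai_id: UserAssignedIdentity()},
+    )
+    return identity, IdentityForCmk(user_assigned_identity=uai_id)
+
+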
+class InstanceTypeSchema(msrest.serialization.Model):
+ """Instance type schema.
+
+ :param node_selector: Node Selector.
+ :type node_selector: dict[str, str]
+ :param resources: Resource requests/limits for this instance type.
+ :type resources: ~azure_machine_learning_workspaces.models.InstanceTypeSchemaResources
+ """
+
+ _attribute_map = {
+ 'node_selector': {'key': 'nodeSelector', 'type': '{str}'},
+ 'resources': {'key': 'resources', 'type': 'InstanceTypeSchemaResources'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(InstanceTypeSchema, self).__init__(**kwargs)
+ self.node_selector = kwargs.get('node_selector', None)
+ self.resources = kwargs.get('resources', None)
+
+
+class InstanceTypeSchemaResources(msrest.serialization.Model):
+ """Resource requests/limits for this instance type.
+
+ :param requests: Resource requests for this instance type.
+ :type requests: dict[str, str]
+ :param limits: Resource limits for this instance type.
+ :type limits: dict[str, str]
+ """
+
+ _attribute_map = {
+ 'requests': {'key': 'requests', 'type': '{str}'},
+ 'limits': {'key': 'limits', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(InstanceTypeSchemaResources, self).__init__(**kwargs)
+ self.requests = kwargs.get('requests', None)
+ self.limits = kwargs.get('limits', None)
+
+
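+# Illustrative sketch only, not part of the generated models: an instance type
+# entry for a Kubernetes compute that pins pods to a hypothetical GPU node pool
+# and requests a single GPU. The selector and resource values are placeholders.
+def _example_instance_type():
+    return InstanceTypeSchema(
+        node_selector={'agentpool': 'gpupool'},
+        resources=InstanceTypeSchemaResources(
+            requests={'cpu': '2', 'memory': '8Gi', 'nvidia.com/gpu': '1'},
+            limits={'cpu': '2', 'memory': '8Gi', 'nvidia.com/gpu': '1'},
+        ),
+    )
+
+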
+class KeyVaultProperties(msrest.serialization.Model):
+ """KeyVaultProperties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned
+ encryption key is present.
+ :type key_vault_arm_id: str
+ :param key_identifier: Required. Key vault uri to access the encryption key.
+ :type key_identifier: str
+ :param identity_client_id: For future use - The client id of the identity which will be used to
+ access key vault.
+ :type identity_client_id: str
+ """
+
+ _validation = {
+ 'key_vault_arm_id': {'required': True},
+ 'key_identifier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
+ 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
+ 'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(KeyVaultProperties, self).__init__(**kwargs)
+ self.key_vault_arm_id = kwargs['key_vault_arm_id']
+ self.key_identifier = kwargs['key_identifier']
+ self.identity_client_id = kwargs.get('identity_client_id', None)
+
+
+class KubernetesSchema(msrest.serialization.Model):
+ """Kubernetes Compute Schema.
+
+ :param properties: Properties of Kubernetes.
+ :type properties: ~azure_machine_learning_workspaces.models.KubernetesProperties
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'KubernetesProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(KubernetesSchema, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+class Kubernetes(Compute, KubernetesSchema):
+ """A Machine Learning compute based on Kubernetes Compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param properties: Properties of Kubernetes.
+ :type properties: ~azure_machine_learning_workspaces.models.KubernetesProperties
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provisioning state of the cluster. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and brought
+ from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'KubernetesProperties'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Kubernetes, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+ self.compute_type = 'Kubernetes'  # type: str
+ self.compute_location = kwargs.get('compute_location', None)
+ self.provisioning_state = None
+ self.description = kwargs.get('description', None)
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = kwargs.get('resource_id', None)
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+ self.disable_local_auth = kwargs.get('disable_local_auth', None)
+
+
+class KubernetesProperties(msrest.serialization.Model):
+ """Kubernetes properties.
+
+ :param relay_connection_string: Relay connection string.
+ :type relay_connection_string: str
+ :param service_bus_connection_string: ServiceBus connection string.
+ :type service_bus_connection_string: str
+ :param extension_principal_id: Extension principal-id.
+ :type extension_principal_id: str
+ :param extension_instance_release_train: Extension instance release train.
+ :type extension_instance_release_train: str
+ :param vc_name: VC name.
+ :type vc_name: str
+ :param namespace: Compute namespace.
+ :type namespace: str
+ :param default_instance_type: Default instance type.
+ :type default_instance_type: str
+ :param instance_types: Instance Type Schema.
+ :type instance_types: dict[str, ~azure_machine_learning_workspaces.models.InstanceTypeSchema]
+ """
+
+ _attribute_map = {
+ 'relay_connection_string': {'key': 'relayConnectionString', 'type': 'str'},
+ 'service_bus_connection_string': {'key': 'serviceBusConnectionString', 'type': 'str'},
+ 'extension_principal_id': {'key': 'extensionPrincipalId', 'type': 'str'},
+ 'extension_instance_release_train': {'key': 'extensionInstanceReleaseTrain', 'type': 'str'},
+ 'vc_name': {'key': 'vcName', 'type': 'str'},
+ 'namespace': {'key': 'namespace', 'type': 'str'},
+ 'default_instance_type': {'key': 'defaultInstanceType', 'type': 'str'},
+ 'instance_types': {'key': 'instanceTypes', 'type': '{InstanceTypeSchema}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(KubernetesProperties, self).__init__(**kwargs)
+ self.relay_connection_string = kwargs.get('relay_connection_string', None)
+ self.service_bus_connection_string = kwargs.get('service_bus_connection_string', None)
+ self.extension_principal_id = kwargs.get('extension_principal_id', None)
+ self.extension_instance_release_train = kwargs.get('extension_instance_release_train', None)
+ self.vc_name = kwargs.get('vc_name', None)
+ self.namespace = kwargs.get('namespace', "default")
+ self.default_instance_type = kwargs.get('default_instance_type', None)
+ self.instance_types = kwargs.get('instance_types', None)
+
+
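+# Illustrative sketch only, not part of the generated models: a Kubernetes
+# compute payload wiring KubernetesProperties to a default instance type. The
+# relay/service-bus connection strings and extension principal id are omitted
+# because they are populated by the AML extension on the cluster; the namespace
+# and instance type names below are hypothetical placeholders.
+def _example_kubernetes_compute():
+    return Kubernetes(
+        description='Attached Arc-enabled Kubernetes cluster',
+        properties=KubernetesProperties(
+            namespace='azureml',
+            default_instance_type='defaultinstancetype',
+            instance_types={
+                'defaultinstancetype': InstanceTypeSchema(
+                    resources=InstanceTypeSchemaResources(requests={'cpu': '1', 'memory': '4Gi'}),
+                ),
+            },
+        ),
+    )
+
+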
+class ListAmlUserFeatureResult(msrest.serialization.Model):
+ """The List Aml user feature operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML user facing features.
+ :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature]
+ :ivar next_link: The URI to fetch the next page of AML user features information. Call
+ ListNext() with this to fetch the next page of AML user features information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[AmlUserFeature]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListAmlUserFeatureResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListNotebookKeysResult(msrest.serialization.Model):
+ """ListNotebookKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar primary_access_key:
+ :vartype primary_access_key: str
+ :ivar secondary_access_key:
+ :vartype secondary_access_key: str
+ """
+
+ _validation = {
+ 'primary_access_key': {'readonly': True},
+ 'secondary_access_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+ 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListNotebookKeysResult, self).__init__(**kwargs)
+ self.primary_access_key = None
+ self.secondary_access_key = None
+
+
+class ListStorageAccountKeysResult(msrest.serialization.Model):
+ """ListStorageAccountKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListStorageAccountKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+
+
+class ListUsagesResult(msrest.serialization.Model):
+ """The List Usages operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML resource usages.
+ :vartype value: list[~azure_machine_learning_workspaces.models.Usage]
+ :ivar next_link: The URI to fetch the next page of AML resource usage information. Call
+ ListNext() with this to fetch the next page of AML resource usage information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Usage]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListUsagesResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListWorkspaceKeysResult(msrest.serialization.Model):
+ """ListWorkspaceKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ :ivar user_storage_resource_id:
+ :vartype user_storage_resource_id: str
+ :ivar app_insights_instrumentation_key:
+ :vartype app_insights_instrumentation_key: str
+ :ivar container_registry_credentials:
+ :vartype container_registry_credentials:
+ ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult
+ :ivar notebook_access_keys:
+ :vartype notebook_access_keys: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ 'user_storage_resource_id': {'readonly': True},
+ 'app_insights_instrumentation_key': {'readonly': True},
+ 'container_registry_credentials': {'readonly': True},
+ 'notebook_access_keys': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'},
+ 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'},
+ 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'},
+ 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+ self.user_storage_resource_id = None
+ self.app_insights_instrumentation_key = None
+ self.container_registry_credentials = None
+ self.notebook_access_keys = None
+
+
+class ListWorkspaceQuotas(msrest.serialization.Model):
+ """The List WorkspaceQuotasByVMFamily operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of Workspace Quotas by VM Family.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota]
+ :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family.
+ Call ListNext() with this to fetch the next page of Workspace Quota information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ResourceQuota]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceQuotas, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class NodeStateCounts(msrest.serialization.Model):
+ """Counts of various compute node states on the amlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar idle_node_count: Number of compute nodes in idle state.
+ :vartype idle_node_count: int
+ :ivar running_node_count: Number of compute nodes which are running jobs.
+ :vartype running_node_count: int
+ :ivar preparing_node_count: Number of compute nodes which are being prepared.
+ :vartype preparing_node_count: int
+ :ivar unusable_node_count: Number of compute nodes which are in unusable state.
+ :vartype unusable_node_count: int
+ :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute.
+ :vartype leaving_node_count: int
+ :ivar preempted_node_count: Number of compute nodes which are in preempted state.
+ :vartype preempted_node_count: int
+ """
+
+ _validation = {
+ 'idle_node_count': {'readonly': True},
+ 'running_node_count': {'readonly': True},
+ 'preparing_node_count': {'readonly': True},
+ 'unusable_node_count': {'readonly': True},
+ 'leaving_node_count': {'readonly': True},
+ 'preempted_node_count': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
+ 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
+ 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
+ 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
+ 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
+ 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NodeStateCounts, self).__init__(**kwargs)
+ self.idle_node_count = None
+ self.running_node_count = None
+ self.preparing_node_count = None
+ self.unusable_node_count = None
+ self.leaving_node_count = None
+ self.preempted_node_count = None
+
+
+class NotebookAccessTokenResult(msrest.serialization.Model):
+ """NotebookAccessTokenResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar notebook_resource_id:
+ :vartype notebook_resource_id: str
+ :ivar host_name:
+ :vartype host_name: str
+ :ivar public_dns:
+ :vartype public_dns: str
+ :ivar access_token:
+ :vartype access_token: str
+ :ivar token_type:
+ :vartype token_type: str
+ :ivar expires_in:
+ :vartype expires_in: int
+ :ivar refresh_token:
+ :vartype refresh_token: str
+ :ivar scope:
+ :vartype scope: str
+ """
+
+ _validation = {
+ 'notebook_resource_id': {'readonly': True},
+ 'host_name': {'readonly': True},
+ 'public_dns': {'readonly': True},
+ 'access_token': {'readonly': True},
+ 'token_type': {'readonly': True},
+ 'expires_in': {'readonly': True},
+ 'refresh_token': {'readonly': True},
+ 'scope': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'},
+ 'host_name': {'key': 'hostName', 'type': 'str'},
+ 'public_dns': {'key': 'publicDns', 'type': 'str'},
+ 'access_token': {'key': 'accessToken', 'type': 'str'},
+ 'token_type': {'key': 'tokenType', 'type': 'str'},
+ 'expires_in': {'key': 'expiresIn', 'type': 'int'},
+ 'refresh_token': {'key': 'refreshToken', 'type': 'str'},
+ 'scope': {'key': 'scope', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookAccessTokenResult, self).__init__(**kwargs)
+ self.notebook_resource_id = None
+ self.host_name = None
+ self.public_dns = None
+ self.access_token = None
+ self.token_type = None
+ self.expires_in = None
+ self.refresh_token = None
+ self.scope = None
+
+
+class NotebookPreparationError(msrest.serialization.Model):
+ """NotebookPreparationError.
+
+ :param error_message:
+ :type error_message: str
+ :param status_code:
+ :type status_code: int
+ """
+
+ _attribute_map = {
+ 'error_message': {'key': 'errorMessage', 'type': 'str'},
+ 'status_code': {'key': 'statusCode', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookPreparationError, self).__init__(**kwargs)
+ self.error_message = kwargs.get('error_message', None)
+ self.status_code = kwargs.get('status_code', None)
+
+
+class NotebookResourceInfo(msrest.serialization.Model):
+ """NotebookResourceInfo.
+
+ :param fqdn:
+ :type fqdn: str
+ :param resource_id: The data plane resource ID used to initialize the notebook component.
+ :type resource_id: str
+ :param notebook_preparation_error: The error that occurs when preparing the notebook.
+ :type notebook_preparation_error:
+ ~azure_machine_learning_workspaces.models.NotebookPreparationError
+ """
+
+ _attribute_map = {
+ 'fqdn': {'key': 'fqdn', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookResourceInfo, self).__init__(**kwargs)
+ self.fqdn = kwargs.get('fqdn', None)
+ self.resource_id = kwargs.get('resource_id', None)
+ self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None)
+
+
+class Operation(msrest.serialization.Model):
+ """Azure Machine Learning workspace REST API operation.
+
+ :param name: Operation name: {provider}/{resource}/{operation}.
+ :type name: str
+ :param display: Display name of operation.
+ :type display: ~azure_machine_learning_workspaces.models.OperationDisplay
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'display': {'key': 'display', 'type': 'OperationDisplay'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Operation, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.display = kwargs.get('display', None)
+
+
+class OperationDisplay(msrest.serialization.Model):
+ """Display name of operation.
+
+ :param provider: The resource provider name: Microsoft.MachineLearningExperimentation.
+ :type provider: str
+ :param resource: The resource on which the operation is performed.
+ :type resource: str
+ :param operation: The operation that users can perform.
+ :type operation: str
+ :param description: The description for the operation.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'provider': {'key': 'provider', 'type': 'str'},
+ 'resource': {'key': 'resource', 'type': 'str'},
+ 'operation': {'key': 'operation', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(OperationDisplay, self).__init__(**kwargs)
+ self.provider = kwargs.get('provider', None)
+ self.resource = kwargs.get('resource', None)
+ self.operation = kwargs.get('operation', None)
+ self.description = kwargs.get('description', None)
+
+
+class OperationListResult(msrest.serialization.Model):
+ """An array of operations supported by the resource provider.
+
+ :param value: List of AML workspace operations supported by the AML workspace resource
+ provider.
+ :type value: list[~azure_machine_learning_workspaces.models.Operation]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Operation]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(OperationListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class PaginatedComputeResourcesList(msrest.serialization.Model):
+ """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope.
+
+ :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
+ :type value: list[~azure_machine_learning_workspaces.models.ComputeResource]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ComputeResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PaginatedComputeResourcesList, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class PaginatedWorkspaceConnectionsList(msrest.serialization.Model):
+ """Paginated list of Workspace connection objects.
+
+ :param value: An array of Workspace connection objects.
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceConnection]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class Password(msrest.serialization.Model):
+ """Password.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name:
+ :vartype name: str
+ :ivar value:
+ :vartype value: str
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Password, self).__init__(**kwargs)
+ self.name = None
+ self.value = None
+
+
+class PersonalComputeInstanceSettings(msrest.serialization.Model):
+ """Settings for a personal compute instance.
+
+ :param assigned_user: A user explicitly assigned to a personal compute instance.
+ :type assigned_user: ~azure_machine_learning_workspaces.models.AssignedUser
+ """
+
+ _attribute_map = {
+ 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PersonalComputeInstanceSettings, self).__init__(**kwargs)
+ self.assigned_user = kwargs.get('assigned_user', None)
+
+
+class PrivateEndpoint(msrest.serialization.Model):
+ """The Private Endpoint resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The ARM identifier for Private Endpoint.
+ :vartype id: str
+ :ivar subnet_arm_id: The ARM identifier for Subnet resource that private endpoint links to.
+ :vartype subnet_arm_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'subnet_arm_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpoint, self).__init__(**kwargs)
+ self.id = None
+ self.subnet_arm_id = None
+
+
+class PrivateEndpointConnection(Resource):
+ """The Private Endpoint Connection resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: System data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :param private_endpoint: The resource of private end point.
+ :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint
+ :param private_link_service_connection_state: A collection of information about the state of
+ the connection between service consumer and provider.
+ :type private_link_service_connection_state:
+ ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState
+ :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
+ Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
+ 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpointConnection, self).__init__(**kwargs)
+ self.identity = kwargs.get('identity', None)
+ self.location = kwargs.get('location', None)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.system_data = None
+ self.private_endpoint = kwargs.get('private_endpoint', None)
+ self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
+ self.provisioning_state = None
+
+
+class PrivateEndpointConnectionListResult(msrest.serialization.Model):
+ """List of private endpoint connection associated with the specified workspace.
+
+ :param value: Array of private endpoint connections.
+ :type value: list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpointConnectionListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class PrivateLinkResource(Resource):
+ """A private link resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: System data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar group_id: The private link resource group id.
+ :vartype group_id: str
+ :ivar required_members: The private link resource required member names.
+ :vartype required_members: list[str]
+ :param required_zone_names: The private link resource Private link DNS zone name.
+ :type required_zone_names: list[str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'group_id': {'readonly': True},
+ 'required_members': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
+ 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResource, self).__init__(**kwargs)
+ self.identity = kwargs.get('identity', None)
+ self.location = kwargs.get('location', None)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.system_data = None
+ self.group_id = None
+ self.required_members = None
+ self.required_zone_names = kwargs.get('required_zone_names', None)
+
+
+class PrivateLinkResourceListResult(msrest.serialization.Model):
+ """A list of private link resources.
+
+ :param value: Array of private link resources.
+ :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResourceListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class PrivateLinkServiceConnectionState(msrest.serialization.Model):
+ """A collection of information about the state of the connection between service consumer and provider.
+
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ :param description: The reason for approval/rejection of the connection.
+ :type description: str
+ :param actions_required: A message indicating if changes on the service provider require any
+ updates on the consumer.
+ :type actions_required: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+ self.status = kwargs.get('status', None)
+ self.description = kwargs.get('description', None)
+ self.actions_required = kwargs.get('actions_required', None)
+
+
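+# Illustrative sketch only, not part of the generated models: the connection-state
+# payload used when approving a workspace private endpoint connection. The
+# description text is a hypothetical placeholder.
+def _example_approved_private_endpoint_connection():
+    return PrivateEndpointConnection(
+        private_link_service_connection_state=PrivateLinkServiceConnectionState(
+            status='Approved',
+            description='Approved by the workspace administrator',
+        ),
+    )
+
+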
+class QuotaBaseProperties(msrest.serialization.Model):
+ """The properties for Quota update or retrieval.
+
+ :param id: Specifies the resource ID.
+ :type id: str
+ :param type: Specifies the resource type.
+ :type type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :param unit: An enum describing the unit of quota measurement. Possible values include:
+ "Count".
+ :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(QuotaBaseProperties, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.type = kwargs.get('type', None)
+ self.limit = kwargs.get('limit', None)
+ self.unit = kwargs.get('unit', None)
+
+
+class QuotaUpdateParameters(msrest.serialization.Model):
+ """Quota update parameters.
+
+ :param value: The list for update quota.
+ :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties]
+ :param location: Region of workspace quota to be updated.
+ :type location: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'},
+ 'location': {'key': 'location', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(QuotaUpdateParameters, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.location = kwargs.get('location', None)
+
+
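+# Illustrative sketch only, not part of the generated models: a quota update
+# request raising the limit for one VM family in one region. The quota resource
+# id, type and limit below are hypothetical placeholders.
+def _example_quota_update():
+    return QuotaUpdateParameters(
+        location='eastus',
+        value=[QuotaBaseProperties(
+            id='/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.MachineLearningServices/locations/eastus/quotas/standardDSv2Family',
+            type='Microsoft.MachineLearningServices/locations/quotas',
+            limit=48,
+            unit='Count',
+        )],
+    )
+
+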
+class RegistryListCredentialsResult(msrest.serialization.Model):
+ """RegistryListCredentialsResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location:
+ :vartype location: str
+ :ivar username:
+ :vartype username: str
+ :param passwords:
+ :type passwords: list[~azure_machine_learning_workspaces.models.Password]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'username': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'passwords': {'key': 'passwords', 'type': '[Password]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RegistryListCredentialsResult, self).__init__(**kwargs)
+ self.location = None
+ self.username = None
+ self.passwords = kwargs.get('passwords', None)
+
+
+class ResourceId(msrest.serialization.Model):
+ """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. The ID of the resource.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceId, self).__init__(**kwargs)
+ self.id = kwargs['id']
+
+
+class ResourceName(msrest.serialization.Model):
+ """The Resource Name.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class ResourceQuota(msrest.serialization.Model):
+ """The quota assigned to a resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar aml_workspace_location: Region of the AML workspace in the id.
+ :vartype aml_workspace_location: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar name: Name of the resource.
+ :vartype name: ~azure_machine_learning_workspaces.models.ResourceName
+ :ivar limit: The maximum permitted quota of the resource.
+ :vartype limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'aml_workspace_location': {'readonly': True},
+ 'type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'ResourceName'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceQuota, self).__init__(**kwargs)
+ self.id = None
+ self.aml_workspace_location = None
+ self.type = None
+ self.name = None
+ self.limit = None
+ self.unit = None
+
+
+class ResourceSkuLocationInfo(msrest.serialization.Model):
+ """ResourceSkuLocationInfo.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location: Location of the SKU.
+ :vartype location: str
+ :ivar zones: List of availability zones where the SKU is supported.
+ :vartype zones: list[str]
+ :ivar zone_details: Details of capabilities available to a SKU in specific zones.
+ :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'zones': {'readonly': True},
+ 'zone_details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'zones': {'key': 'zones', 'type': '[str]'},
+ 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuLocationInfo, self).__init__(**kwargs)
+ self.location = None
+ self.zones = None
+ self.zone_details = None
+
+
+class ResourceSkuZoneDetails(msrest.serialization.Model):
+ """Describes The zonal capabilities of a SKU.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The set of zones that the SKU is available in with the specified capabilities.
+ :vartype name: list[str]
+ :ivar capabilities: A list of capabilities that are available for the SKU in the specified list
+ of zones.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': '[str]'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuZoneDetails, self).__init__(**kwargs)
+ self.name = None
+ self.capabilities = None
+
+
+class Restriction(msrest.serialization.Model):
+ """The restriction because of which SKU cannot be used.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar type: The type of restrictions. As of now, the only possible value is location.
+ :vartype type: str
+ :ivar values: The value of restrictions. If the restriction type is set to location, this would
+ be the different locations where the SKU is restricted.
+ :vartype values: list[str]
+ :param reason_code: The reason for the restriction. Possible values include: "NotSpecified",
+ "NotAvailableForRegion", "NotAvailableForSubscription".
+ :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'values': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[str]'},
+ 'reason_code': {'key': 'reasonCode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Restriction, self).__init__(**kwargs)
+ self.type = None
+ self.values = None
+ self.reason_code = kwargs.get('reason_code', None)
+
+
+class ScaleSettings(msrest.serialization.Model):
+ """scale settings for AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_node_count: Required. Max number of nodes to use.
+ :type max_node_count: int
+ :param min_node_count: Min number of nodes to use.
+ :type min_node_count: int
+ :param node_idle_time_before_scale_down: Node idle time before scaling down amlCompute. This
+ value is serialized as an ISO 8601 duration (for example, PT15M).
+ :type node_idle_time_before_scale_down: ~datetime.timedelta
+ """
+
+ _validation = {
+ 'max_node_count': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScaleSettings, self).__init__(**kwargs)
+ self.max_node_count = kwargs['max_node_count']
+ self.min_node_count = kwargs.get('min_node_count', 0)
+ self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None)
+
+
+class ScaleSettingsInformation(msrest.serialization.Model):
+ """Desired scale settings for the amlCompute.
+
+ :param scale_settings: scale settings for AML Compute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ """
+
+ _attribute_map = {
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScaleSettingsInformation, self).__init__(**kwargs)
+ self.scale_settings = kwargs.get('scale_settings', None)
+
+
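+# Illustrative sketch only, not part of the generated models: autoscale settings
+# for an amlCompute cluster that scales between 0 and 4 nodes and releases idle
+# nodes after 15 minutes (serialized as the ISO 8601 duration PT15M).
+def _example_scale_settings():
+    import datetime
+    return ScaleSettingsInformation(
+        scale_settings=ScaleSettings(
+            max_node_count=4,
+            min_node_count=0,
+            node_idle_time_before_scale_down=datetime.timedelta(minutes=15),
+        ),
+    )
+
+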
+class ScriptReference(msrest.serialization.Model):
+ """Script reference.
+
+ :param script_source: The storage source of the script: inline, workspace.
+ :type script_source: str
+ :param script_data: The location of scripts in the mounted volume.
+ :type script_data: str
+ :param script_arguments: Optional command line arguments passed to the script to run.
+ :type script_arguments: str
+ :param timeout: Optional time period passed to timeout command.
+ :type timeout: str
+ """
+
+ _attribute_map = {
+ 'script_source': {'key': 'scriptSource', 'type': 'str'},
+ 'script_data': {'key': 'scriptData', 'type': 'str'},
+ 'script_arguments': {'key': 'scriptArguments', 'type': 'str'},
+ 'timeout': {'key': 'timeout', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScriptReference, self).__init__(**kwargs)
+ self.script_source = kwargs.get('script_source', None)
+ self.script_data = kwargs.get('script_data', None)
+ self.script_arguments = kwargs.get('script_arguments', None)
+ self.timeout = kwargs.get('timeout', None)
+
+
+class ScriptsToExecute(msrest.serialization.Model):
+ """Customized setup scripts.
+
+ :param startup_script: Script that's run every time the machine starts.
+ :type startup_script: ~azure_machine_learning_workspaces.models.ScriptReference
+ :param creation_script: Script that's run only once during provisioning of the compute.
+ :type creation_script: ~azure_machine_learning_workspaces.models.ScriptReference
+ """
+
+ _attribute_map = {
+ 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'},
+ 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScriptsToExecute, self).__init__(**kwargs)
+ self.startup_script = kwargs.get('startup_script', None)
+ self.creation_script = kwargs.get('creation_script', None)
+
+
+class ServiceManagedResourcesSettings(msrest.serialization.Model):
+ """ServiceManagedResourcesSettings.
+
+ :param cosmos_db: The settings for the service managed cosmosdb account.
+ :type cosmos_db: ~azure_machine_learning_workspaces.models.CosmosDbSettings
+ """
+
+ _attribute_map = {
+ 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServiceManagedResourcesSettings, self).__init__(**kwargs)
+ self.cosmos_db = kwargs.get('cosmos_db', None)
+
+
+class ServicePrincipalCredentials(msrest.serialization.Model):
+ """Service principal credentials.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param client_id: Required. Client Id.
+ :type client_id: str
+ :param client_secret: Required. Client secret.
+ :type client_secret: str
+ """
+
+ _validation = {
+ 'client_id': {'required': True},
+ 'client_secret': {'required': True},
+ }
+
+ _attribute_map = {
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalCredentials, self).__init__(**kwargs)
+ self.client_id = kwargs['client_id']
+ self.client_secret = kwargs['client_secret']
+
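+
+# Illustrative sketch only (not generated): both fields are required, so this kwargs-based
+# model raises KeyError if either is missing. The GUID and secret below are placeholders.
+def _example_service_principal_credentials():
+    creds = ServicePrincipalCredentials(
+        client_id="00000000-0000-0000-0000-000000000000",
+        client_secret="<placeholder-secret>",
+    )
+    # serialize() should map the attributes to their REST keys, clientId / clientSecret.
+    return creds.serialize()
+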
+
+class SetupScripts(msrest.serialization.Model):
+ """Details of customized scripts to execute for setting up the cluster.
+
+ :param scripts: Customized setup scripts.
+ :type scripts: ~azure_machine_learning_workspaces.models.ScriptsToExecute
+ """
+
+ _attribute_map = {
+ 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SetupScripts, self).__init__(**kwargs)
+ self.scripts = kwargs.get('scripts', None)
+
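+
+# Illustrative sketch only (not generated): how the nested setup-script models compose.
+# Paths, arguments and the timeout value are placeholders.
+def _example_setup_scripts():
+    creation = ScriptReference(
+        script_source="workspace",              # per the docstring: "inline" or "workspace"
+        script_data="path/to/creation.sh",
+        script_arguments="--install-deps",
+        timeout="10m",
+    )
+    startup = ScriptReference(script_source="inline", script_data="echo hello")
+    return SetupScripts(
+        scripts=ScriptsToExecute(creation_script=creation, startup_script=startup),
+    )
+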
+
+class SharedPrivateLinkResource(msrest.serialization.Model):
+ """SharedPrivateLinkResource.
+
+ :param name: Unique name of the private link.
+ :type name: str
+ :param private_link_resource_id: The resource id that private link links to.
+ :type private_link_resource_id: str
+ :param group_id: The private link resource group id.
+ :type group_id: str
+ :param request_message: Request message.
+ :type request_message: str
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+ 'status': {'key': 'properties.status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SharedPrivateLinkResource, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.private_link_resource_id = kwargs.get('private_link_resource_id', None)
+ self.group_id = kwargs.get('group_id', None)
+ self.request_message = kwargs.get('request_message', None)
+ self.status = kwargs.get('status', None)
+
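+
+# Illustrative sketch only (not generated): the "properties.*" keys in _attribute_map are
+# flattened attributes, so serialization should nest them under a "properties" object.
+# The resource id below is a placeholder.
+def _example_shared_private_link_resource():
+    resource = SharedPrivateLinkResource(
+        name="testdbresource",
+        private_link_resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.DocumentDB/databaseAccounts/<acct>/privateLinkResources/Sql",
+        group_id="Sql",
+        request_message="Please approve",
+        status="Approved",
+    )
+    # Expected shape (roughly): {"name": "...", "properties": {"privateLinkResourceId": "...",
+    # "groupId": "Sql", "requestMessage": "...", "status": "Approved"}}.
+    return resource.serialize()
+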
+
+class Sku(msrest.serialization.Model):
+ """Sku of the resource.
+
+ :param name: Name of the sku.
+ :type name: str
+ :param tier: Tier of the sku like Basic or Enterprise.
+ :type tier: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Sku, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.tier = kwargs.get('tier', None)
+
+
+class SkuCapability(msrest.serialization.Model):
+ """Features/user capabilities associated with the sku.
+
+ :param name: Capability/Feature ID.
+ :type name: str
+ :param value: Details about the feature/capability.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SkuCapability, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.value = kwargs.get('value', None)
+
+
+class SkuListResult(msrest.serialization.Model):
+ """List of skus with features.
+
+ :param value:
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku]
+ :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this
+ URI to fetch the next page of Workspace Skus.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceSku]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SkuListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class SslConfiguration(msrest.serialization.Model):
+ """The ssl configuration for scoring.
+
+ :param status: Enable or disable ssl for scoring. Possible values include: "Disabled",
+ "Enabled", "Auto".
+ :type status: str or ~azure_machine_learning_workspaces.models.SslConfigurationStatus
+ :param cert: Cert data.
+ :type cert: str
+ :param key: Key data.
+ :type key: str
+ :param cname: CNAME of the cert.
+ :type cname: str
+ :param leaf_domain_label: Leaf domain label of public endpoint.
+ :type leaf_domain_label: str
+ :param overwrite_existing_domain: Indicates whether to overwrite existing domain label.
+ :type overwrite_existing_domain: bool
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'cert': {'key': 'cert', 'type': 'str'},
+ 'key': {'key': 'key', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'},
+ 'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SslConfiguration, self).__init__(**kwargs)
+ self.status = kwargs.get('status', None)
+ self.cert = kwargs.get('cert', None)
+ self.key = kwargs.get('key', None)
+ self.cname = kwargs.get('cname', None)
+ self.leaf_domain_label = kwargs.get('leaf_domain_label', None)
+ self.overwrite_existing_domain = kwargs.get('overwrite_existing_domain', None)
+
+
+class SynapseSpark(Compute):
+ """A SynapseSpark compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.SynapseSparkProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'SynapseSparkProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SynapseSpark, self).__init__(**kwargs)
+ self.compute_type = 'SynapseSpark' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class SynapseSparkProperties(msrest.serialization.Model):
+ """SynapseSparkProperties.
+
+ :param auto_scale_properties: Auto scale properties.
+ :type auto_scale_properties: ~azure_machine_learning_workspaces.models.AutoScaleProperties
+ :param auto_pause_properties: Auto pause properties.
+ :type auto_pause_properties: ~azure_machine_learning_workspaces.models.AutoPauseProperties
+ :param spark_version: Spark version.
+ :type spark_version: str
+ :param node_count: The number of compute nodes currently assigned to the compute.
+ :type node_count: int
+ :param node_size: Node size.
+ :type node_size: str
+ :param node_size_family: Node size family.
+ :type node_size_family: str
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ :param resource_group: Name of the resource group in which workspace is located.
+ :type resource_group: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param pool_name: Pool name.
+ :type pool_name: str
+ """
+
+ _attribute_map = {
+ 'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'},
+ 'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'},
+ 'spark_version': {'key': 'sparkVersion', 'type': 'str'},
+ 'node_count': {'key': 'nodeCount', 'type': 'int'},
+ 'node_size': {'key': 'nodeSize', 'type': 'str'},
+ 'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'},
+ 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+ 'resource_group': {'key': 'resourceGroup', 'type': 'str'},
+ 'workspace_name': {'key': 'workspaceName', 'type': 'str'},
+ 'pool_name': {'key': 'poolName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SynapseSparkProperties, self).__init__(**kwargs)
+ self.auto_scale_properties = kwargs.get('auto_scale_properties', None)
+ self.auto_pause_properties = kwargs.get('auto_pause_properties', None)
+ self.spark_version = kwargs.get('spark_version', None)
+ self.node_count = kwargs.get('node_count', None)
+ self.node_size = kwargs.get('node_size', None)
+ self.node_size_family = kwargs.get('node_size_family', None)
+ self.subscription_id = kwargs.get('subscription_id', None)
+ self.resource_group = kwargs.get('resource_group', None)
+ self.workspace_name = kwargs.get('workspace_name', None)
+ self.pool_name = kwargs.get('pool_name', None)
+
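+
+# Illustrative sketch only (not generated): attaching an existing Synapse Spark pool. The
+# subscription, resource group, workspace and pool names are placeholders; auto-scale and
+# auto-pause settings are omitted for brevity.
+def _example_synapse_spark():
+    props = SynapseSparkProperties(
+        spark_version="3.1",
+        node_size="Medium",
+        node_size_family="MemoryOptimized",
+        subscription_id="<subscription-id>",
+        resource_group="<resource-group>",
+        workspace_name="<synapse-workspace>",
+        pool_name="<spark-pool>",
+    )
+    # compute_type is fixed to "SynapseSpark" by the constructor.
+    return SynapseSpark(properties=props, description="Attached Synapse Spark pool")
+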
+
+class SystemData(msrest.serialization.Model):
+ """Metadata pertaining to creation and last modification of the resource.
+
+ :param created_by: The identity that created the resource.
+ :type created_by: str
+ :param created_by_type: The type of identity that created the resource. Possible values
+ include: "User", "Application", "ManagedIdentity", "Key".
+ :type created_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
+ :param created_at: The timestamp of resource creation (UTC).
+ :type created_at: ~datetime.datetime
+ :param last_modified_by: The identity that last modified the resource.
+ :type last_modified_by: str
+ :param last_modified_by_type: The type of identity that last modified the resource. Possible
+ values include: "User", "Application", "ManagedIdentity", "Key".
+ :type last_modified_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
+ :param last_modified_at: The timestamp of resource last modification (UTC).
+ :type last_modified_at: ~datetime.datetime
+ """
+
+ _attribute_map = {
+ 'created_by': {'key': 'createdBy', 'type': 'str'},
+ 'created_by_type': {'key': 'createdByType', 'type': 'str'},
+ 'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
+ 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
+ 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
+ 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemData, self).__init__(**kwargs)
+ self.created_by = kwargs.get('created_by', None)
+ self.created_by_type = kwargs.get('created_by_type', None)
+ self.created_at = kwargs.get('created_at', None)
+ self.last_modified_by = kwargs.get('last_modified_by', None)
+ self.last_modified_by_type = kwargs.get('last_modified_by_type', None)
+ self.last_modified_at = kwargs.get('last_modified_at', None)
+
+
+class SystemService(msrest.serialization.Model):
+ """A system service running on a compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar system_service_type: The type of this system service.
+ :vartype system_service_type: str
+ :ivar public_ip_address: Public IP address.
+ :vartype public_ip_address: str
+ :ivar version: The version for this type.
+ :vartype version: str
+ """
+
+ _validation = {
+ 'system_service_type': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'version': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemService, self).__init__(**kwargs)
+ self.system_service_type = None
+ self.public_ip_address = None
+ self.version = None
+
+
+class UpdateWorkspaceQuotas(msrest.serialization.Model):
+ """The properties for update Quota response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ :param status: Status of update workspace quota. Possible values include: "Undefined",
+ "Success", "Failure", "InvalidQuotaBelowClusterMinimum",
+ "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku",
+ "OperationNotEnabledForRegion".
+ :type status: str or ~azure_machine_learning_workspaces.models.Status
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.limit = kwargs.get('limit', None)
+ self.unit = None
+ self.status = kwargs.get('status', None)
+
+
+class UpdateWorkspaceQuotasResult(msrest.serialization.Model):
+ """The result of update workspace quota.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of workspace quota update results.
+ :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas]
+ :ivar next_link: The URI to fetch the next page of workspace quota update results. Call
+ ListNext() with this URI to fetch the next page of workspace quota update results.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class Usage(msrest.serialization.Model):
+ """Describes AML Resource Usage.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar aml_workspace_location: Region of the AML workspace in the id.
+ :vartype aml_workspace_location: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit
+ :ivar current_value: The current usage of the resource.
+ :vartype current_value: long
+ :ivar limit: The maximum permitted usage of the resource.
+ :vartype limit: long
+ :ivar name: The name of the type of usage.
+ :vartype name: ~azure_machine_learning_workspaces.models.UsageName
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'aml_workspace_location': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ 'current_value': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'name': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'current_value': {'key': 'currentValue', 'type': 'long'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'UsageName'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Usage, self).__init__(**kwargs)
+ self.id = None
+ self.aml_workspace_location = None
+ self.type = None
+ self.unit = None
+ self.current_value = None
+ self.limit = None
+ self.name = None
+
+
+class UsageName(msrest.serialization.Model):
+ """The Usage Names.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UsageName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+ """Settings for user account that gets created on each on the nodes of a compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param admin_user_name: Required. Name of the administrator user account which can be used to
+ SSH to nodes.
+ :type admin_user_name: str
+ :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+ :type admin_user_ssh_public_key: str
+ :param admin_user_password: Password of the administrator user account.
+ :type admin_user_password: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+ 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAccountCredentials, self).__init__(**kwargs)
+ self.admin_user_name = kwargs['admin_user_name']
+ self.admin_user_ssh_public_key = kwargs.get('admin_user_ssh_public_key', None)
+ self.admin_user_password = kwargs.get('admin_user_password', None)
+
+
+class UserAssignedIdentity(msrest.serialization.Model):
+ """User Assigned Identity.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of the user assigned identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of the user assigned identity.
+ :vartype tenant_id: str
+ :ivar client_id: The clientId (also known as appId) of the user assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'client_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAssignedIdentity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.client_id = None
+
+
+class VirtualMachine(Compute):
+ """A Machine Learning compute based on Azure Virtual Machines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.VirtualMachineProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachine, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class VirtualMachineImage(msrest.serialization.Model):
+ """Virtual Machine image for Windows AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. Virtual Machine image path.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineImage, self).__init__(**kwargs)
+ self.id = kwargs['id']
+
+
+class VirtualMachineProperties(msrest.serialization.Model):
+ """VirtualMachineProperties.
+
+ :param virtual_machine_size: Virtual Machine size.
+ :type virtual_machine_size: str
+ :param ssh_port: Port open for ssh connections.
+ :type ssh_port: int
+ :param address: Public IP address of the virtual machine.
+ :type address: str
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ :param is_notebook_instance_compute: Indicates whether this compute will be used for running
+ notebooks.
+ :type is_notebook_instance_compute: bool
+ """
+
+ _attribute_map = {
+ 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineProperties, self).__init__(**kwargs)
+ self.virtual_machine_size = kwargs.get('virtual_machine_size', None)
+ self.ssh_port = kwargs.get('ssh_port', None)
+ self.address = kwargs.get('address', None)
+ self.administrator_account = kwargs.get('administrator_account', None)
+ self.is_notebook_instance_compute = kwargs.get('is_notebook_instance_compute', None)
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSecrets, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.administrator_account = kwargs.get('administrator_account', None)
+
+
+class VirtualMachineSize(msrest.serialization.Model):
+ """Describes the properties of a VM size.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The name of the virtual machine size.
+ :vartype name: str
+ :ivar family: The family name of the virtual machine size.
+ :vartype family: str
+ :ivar v_cp_us: The number of vCPUs supported by the virtual machine size.
+ :vartype v_cp_us: int
+ :ivar gpus: The number of GPUs supported by the virtual machine size.
+ :vartype gpus: int
+ :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+ :vartype os_vhd_size_mb: int
+ :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+ size.
+ :vartype max_resource_volume_mb: int
+ :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+ :vartype memory_gb: float
+ :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+ :vartype low_priority_capable: bool
+ :ivar premium_io: Specifies if the virtual machine size supports premium IO.
+ :vartype premium_io: bool
+ :param estimated_vm_prices: The estimated price information for using a VM.
+ :type estimated_vm_prices: ~azure_machine_learning_workspaces.models.EstimatedVmPrices
+ :param supported_compute_types: Specifies the compute types supported by the virtual machine
+ size.
+ :type supported_compute_types: list[str]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'family': {'readonly': True},
+ 'v_cp_us': {'readonly': True},
+ 'gpus': {'readonly': True},
+ 'os_vhd_size_mb': {'readonly': True},
+ 'max_resource_volume_mb': {'readonly': True},
+ 'memory_gb': {'readonly': True},
+ 'low_priority_capable': {'readonly': True},
+ 'premium_io': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'family': {'key': 'family', 'type': 'str'},
+ 'v_cp_us': {'key': 'vCPUs', 'type': 'int'},
+ 'gpus': {'key': 'gpus', 'type': 'int'},
+ 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'},
+ 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'},
+ 'memory_gb': {'key': 'memoryGB', 'type': 'float'},
+ 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'},
+ 'premium_io': {'key': 'premiumIO', 'type': 'bool'},
+ 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVmPrices'},
+ 'supported_compute_types': {'key': 'supportedComputeTypes', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSize, self).__init__(**kwargs)
+ self.name = None
+ self.family = None
+ self.v_cp_us = None
+ self.gpus = None
+ self.os_vhd_size_mb = None
+ self.max_resource_volume_mb = None
+ self.memory_gb = None
+ self.low_priority_capable = None
+ self.premium_io = None
+ self.estimated_vm_prices = kwargs.get('estimated_vm_prices', None)
+ self.supported_compute_types = kwargs.get('supported_compute_types', None)
+
+
+class VirtualMachineSizeListResult(msrest.serialization.Model):
+ """The List Virtual Machine size operation response.
+
+ :param value: The list of virtual machine sizes supported by AmlCompute.
+ :type value: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[VirtualMachineSize]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+ """Admin credentials for virtual machine.
+
+ :param username: Username of admin account.
+ :type username: str
+ :param password: Password of admin account.
+ :type password: str
+ :param public_key_data: Public key data.
+ :type public_key_data: str
+ :param private_key_data: Private key data.
+ :type private_key_data: str
+ """
+
+ _attribute_map = {
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+ 'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.public_key_data = kwargs.get('public_key_data', None)
+ self.private_key_data = kwargs.get('private_key_data', None)
+
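+
+# Illustrative sketch only (not generated): attaching an existing VM as compute. The address,
+# credentials and resource id are placeholders; prefer key-based auth over passwords.
+def _example_virtual_machine_compute():
+    admin = VirtualMachineSshCredentials(
+        username="azureuser",
+        public_key_data="<ssh-public-key>",
+    )
+    props = VirtualMachineProperties(
+        virtual_machine_size="Standard_DS3_v2",
+        ssh_port=22,
+        address="<public-ip-or-fqdn>",
+        administrator_account=admin,
+    )
+    return VirtualMachine(
+        resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Compute/virtualMachines/<vm>",
+        properties=props,
+    )
+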
+
+class Workspace(Resource):
+ """An object that represents a machine learning workspace.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: System data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar workspace_id: The immutable id associated with this workspace.
+ :vartype workspace_id: str
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace. This name is mutable.
+ :type friendly_name: str
+ :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+ :param discovery_url: Url for the discovery service to identify regional endpoints for machine
+ learning experimentation services.
+ :type discovery_url: str
+ :ivar provisioning_state: The current deployment state of the workspace resource. The
+ provisioningState property indicates the state of resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param encryption: The encryption settings of Azure ML workspace.
+ :type encryption: ~azure_machine_learning_workspaces.models.EncryptionProperty
+ :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+ collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by the
+ workspace RP in the customer subscription if the workspace is a CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+ access when behind VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :param public_network_access: Whether requests from Public Network are allowed. Possible values
+ include: "Enabled", "Disabled".
+ :type public_network_access: str or
+ ~azure_machine_learning_workspaces.models.PublicNetworkAccess
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ :ivar notebook_info: The notebook info of Azure ML workspace.
+ :vartype notebook_info: ~azure_machine_learning_workspaces.models.NotebookResourceInfo
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ :ivar tenant_id: The tenant id associated with this workspace.
+ :vartype tenant_id: str
+ :ivar storage_hns_enabled: Indicates whether the storage associated with the workspace has
+ hierarchical namespace (HNS) enabled.
+ :vartype storage_hns_enabled: bool
+ :ivar ml_flow_tracking_uri: The URI associated with this workspace that MLflow must point at to
+ set up tracking.
+ :vartype ml_flow_tracking_uri: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'workspace_id': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'service_provisioned_resource_group': {'readonly': True},
+ 'private_link_count': {'readonly': True},
+ 'private_endpoint_connections': {'readonly': True},
+ 'notebook_info': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'storage_hns_enabled': {'readonly': True},
+ 'ml_flow_tracking_uri': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'key_vault': {'key': 'properties.keyVault', 'type': 'str'},
+ 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'},
+ 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'},
+ 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'},
+ 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
+ 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'},
+ 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'},
+ 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'},
+ 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'},
+ 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
+ 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'},
+ 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'},
+ 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+ 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+ 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'},
+ 'storage_hns_enabled': {'key': 'properties.storageHnsEnabled', 'type': 'bool'},
+ 'ml_flow_tracking_uri': {'key': 'properties.mlFlowTrackingUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Workspace, self).__init__(**kwargs)
+ self.identity = kwargs.get('identity', None)
+ self.location = kwargs.get('location', None)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.system_data = None
+ self.workspace_id = None
+ self.description = kwargs.get('description', None)
+ self.friendly_name = kwargs.get('friendly_name', None)
+ self.key_vault = kwargs.get('key_vault', None)
+ self.application_insights = kwargs.get('application_insights', None)
+ self.container_registry = kwargs.get('container_registry', None)
+ self.storage_account = kwargs.get('storage_account', None)
+ self.discovery_url = kwargs.get('discovery_url', None)
+ self.provisioning_state = None
+ self.encryption = kwargs.get('encryption', None)
+ self.hbi_workspace = kwargs.get('hbi_workspace', False)
+ self.service_provisioned_resource_group = None
+ self.private_link_count = None
+ self.image_build_compute = kwargs.get('image_build_compute', None)
+ self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', False)
+ self.public_network_access = kwargs.get('public_network_access', None)
+ self.private_endpoint_connections = None
+ self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None)
+ self.notebook_info = None
+ self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None)
+ self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None)
+ self.tenant_id = None
+ self.storage_hns_enabled = None
+ self.ml_flow_tracking_uri = None
+
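+
+# Illustrative sketch only (not generated): a minimal create payload for a workspace. The ARM
+# ids are placeholders. Read-only attributes (workspace_id, provisioning_state, tenant_id, ...)
+# stay None on the client and are only populated from server responses; hbi_workspace and
+# allow_public_access_when_behind_vnet default to False.
+def _example_workspace():
+    return Workspace(
+        location="eastus2",
+        friendly_name="Example workspace",
+        description="Illustrative workspace payload",
+        key_vault="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<kv>",
+        storage_account="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<sa>",
+        application_insights="/subscriptions/<sub>/resourceGroups/<rg>/providers/microsoft.insights/components/<appi>",
+        sku=Sku(name="Basic", tier="Basic"),
+        tags={"env": "dev"},
+    )
+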
+
+class WorkspaceConnection(msrest.serialization.Model):
+ """Workspace connection.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: ResourceId of the workspace connection.
+ :vartype id: str
+ :ivar name: Friendly name of the workspace connection.
+ :vartype name: str
+ :ivar type: Resource type of workspace connection.
+ :vartype type: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+ :param value_format: format for the workspace connection value. Possible values include:
+ "JSON".
+ :type value_format: str or ~azure_machine_learning_workspaces.models.ValueFormat
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ 'value_format': {'key': 'properties.valueFormat', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceConnection, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.category = kwargs.get('category', None)
+ self.target = kwargs.get('target', None)
+ self.auth_type = kwargs.get('auth_type', None)
+ self.value = kwargs.get('value', None)
+ self.value_format = kwargs.get('value_format', None)
+
+
+class WorkspaceListResult(msrest.serialization.Model):
+ """The result of a request to list machine learning workspaces.
+
+ :param value: The list of machine learning workspaces. Since this list may be incomplete, the
+ nextLink field should be used to request the next list of machine learning workspaces.
+ :type value: list[~azure_machine_learning_workspaces.models.Workspace]
+ :param next_link: The URI that can be used to request the next list of machine learning
+ workspaces.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Workspace]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class WorkspaceSku(msrest.serialization.Model):
+ """Describes Workspace Sku details and features.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar locations: The set of locations in which the SKU is available. These will be supported
+ and registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.).
+ :vartype locations: list[str]
+ :ivar location_info: A list of locations and availability zones in those locations where the
+ SKU is available.
+ :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo]
+ :ivar tier: Sku Tier like Basic or Enterprise.
+ :vartype tier: str
+ :ivar resource_type:
+ :vartype resource_type: str
+ :ivar name:
+ :vartype name: str
+ :ivar capabilities: List of features/user capabilities associated with the sku.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ :param restrictions: The restrictions because of which SKU cannot be used. This is empty if
+ there are no restrictions.
+ :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction]
+ """
+
+ _validation = {
+ 'locations': {'readonly': True},
+ 'location_info': {'readonly': True},
+ 'tier': {'readonly': True},
+ 'resource_type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'locations': {'key': 'locations', 'type': '[str]'},
+ 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceSku, self).__init__(**kwargs)
+ self.locations = None
+ self.location_info = None
+ self.tier = None
+ self.resource_type = None
+ self.name = None
+ self.capabilities = None
+ self.restrictions = kwargs.get('restrictions', None)
+
+
+class WorkspaceUpdateParameters(msrest.serialization.Model):
+ """The parameters for updating a machine learning workspace.
+
+ :param tags: A set of tags. The resource tags for the machine learning workspace.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace.
+ :type friendly_name: str
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ :param public_network_access: Whether requests from Public Network are allowed. Possible values
+ include: "Enabled", "Disabled".
+ :type public_network_access: str or
+ ~azure_machine_learning_workspaces.models.PublicNetworkAccess
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+ 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+ 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceUpdateParameters, self).__init__(**kwargs)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.identity = kwargs.get('identity', None)
+ self.description = kwargs.get('description', None)
+ self.friendly_name = kwargs.get('friendly_name', None)
+ self.image_build_compute = kwargs.get('image_build_compute', None)
+ self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None)
+ self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None)
+ self.public_network_access = kwargs.get('public_network_access', None)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py
new file mode 100644
index 00000000000..0d49e80681f
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py
@@ -0,0 +1,5337 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._azure_machine_learning_workspaces_enums import *
+
+
+class Compute(msrest.serialization.Model):
+ """Machine Learning compute object.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, Kubernetes, SynapseSpark, VirtualMachine.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'Kubernetes': 'Kubernetes', 'SynapseSpark': 'SynapseSpark', 'VirtualMachine': 'VirtualMachine'}
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ **kwargs
+ ):
+ super(Compute, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.compute_location = compute_location
+ self.provisioning_state = None
+ self.description = description
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = resource_id
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+ self.disable_local_auth = disable_local_auth
+
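+
+# Illustrative sketch only (not generated): computeType is the discriminator in _subtype_map,
+# so deserializing against the Compute base class should yield the matching subclass. The
+# payload below is a hand-written stand-in for a service response.
+def _example_polymorphic_compute(payload=None):
+    payload = payload or {
+        "computeType": "SynapseSpark",
+        "description": "Attached Synapse Spark pool",
+        "properties": {"poolName": "<spark-pool>"},
+    }
+    compute = Compute.deserialize(payload)
+    # For the payload above, isinstance(compute, SynapseSpark) is expected to hold.
+    return compute
+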
+
+class Aks(Compute):
+ """A Machine Learning compute based on AKS.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: AKS properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AksProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["AksProperties"] = None,
+ **kwargs
+ ):
+ super(Aks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.properties = properties
+
+
+class AksComputeSecretsProperties(msrest.serialization.Model):
+ """Properties of AksComputeSecrets.
+
+ :param user_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type user_kube_config: str
+ :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type admin_kube_config: str
+ :param image_pull_secret_name: Image registry pull secret.
+ :type image_pull_secret_name: str
+ """
+
+ _attribute_map = {
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ user_kube_config: Optional[str] = None,
+ admin_kube_config: Optional[str] = None,
+ image_pull_secret_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksComputeSecretsProperties, self).__init__(**kwargs)
+ self.user_kube_config = user_kube_config
+ self.admin_kube_config = admin_kube_config
+ self.image_pull_secret_name = image_pull_secret_name
+
+
+class ComputeSecrets(msrest.serialization.Model):
+ """Secrets related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+
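+# Note (illustrative, not part of the generated models): the ``_subtype_map`` above is what lets
+# msrest resolve the polymorphic ``computeType`` discriminator during deserialization. For
+# example, a payload like ``{"computeType": "AKS", "adminKubeConfig": "..."}`` passed to
+# ``ComputeSecrets.deserialize`` is expected to come back as an ``AksComputeSecrets`` instance
+# rather than as the base class.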
+
+class AksComputeSecrets(ComputeSecrets, AksComputeSecretsProperties):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param user_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type user_kube_config: str
+ :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type admin_kube_config: str
+ :param image_pull_secret_name: Image registry pull secret.
+ :type image_pull_secret_name: str
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ user_kube_config: Optional[str] = None,
+ admin_kube_config: Optional[str] = None,
+ image_pull_secret_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksComputeSecrets, self).__init__(user_kube_config=user_kube_config, admin_kube_config=admin_kube_config, image_pull_secret_name=image_pull_secret_name, **kwargs)
+ self.user_kube_config = user_kube_config
+ self.admin_kube_config = admin_kube_config
+ self.image_pull_secret_name = image_pull_secret_name
+        self.compute_type = 'AKS'  # type: str
+
+
+class AksNetworkingConfiguration(msrest.serialization.Model):
+    """Advanced configuration for AKS networking.
+
+ :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet_id: str
+ :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
+ not overlap with any Subnet IP ranges.
+ :type service_cidr: str
+ :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
+ the Kubernetes service address range specified in serviceCidr.
+ :type dns_service_ip: str
+ :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
+ must not overlap with any Subnet IP ranges or the Kubernetes service address range.
+ :type docker_bridge_cidr: str
+ """
+
+ _validation = {
+ 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
+ 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ }
+
+ _attribute_map = {
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
+ 'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
+ 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
+ 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ subnet_id: Optional[str] = None,
+ service_cidr: Optional[str] = None,
+ dns_service_ip: Optional[str] = None,
+ docker_bridge_cidr: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksNetworkingConfiguration, self).__init__(**kwargs)
+ self.subnet_id = subnet_id
+ self.service_cidr = service_cidr
+ self.dns_service_ip = dns_service_ip
+ self.docker_bridge_cidr = docker_bridge_cidr
+
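+# Illustrative sketch (placeholder IDs and ranges, not generated code): the CIDR patterns in
+# ``_validation`` above constrain what the client will accept, so a typical configuration
+# would look like:
+#
+#   networking = AksNetworkingConfiguration(
+#       subnet_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Network"
+#                 "/virtualNetworks/<vnet>/subnets/<subnet>",
+#       service_cidr="10.0.0.0/16",
+#       dns_service_ip="10.0.0.10",
+#       docker_bridge_cidr="172.17.0.1/16",
+#   )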
+
+class AksProperties(msrest.serialization.Model):
+ """AKS properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param cluster_fqdn: Cluster fully qualified domain name.
+ :type cluster_fqdn: str
+ :ivar system_services: System services.
+ :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
+ :param agent_count: Number of agents.
+ :type agent_count: int
+ :param agent_vm_size: Agent virtual machine size.
+ :type agent_vm_size: str
+ :param cluster_purpose: Intended usage of the cluster. Possible values include: "FastProd",
+ "DenseProd", "DevTest". Default value: "FastProd".
+ :type cluster_purpose: str or ~azure_machine_learning_workspaces.models.ClusterPurpose
+ :param ssl_configuration: SSL configuration.
+ :type ssl_configuration: ~azure_machine_learning_workspaces.models.SslConfiguration
+ :param aks_networking_configuration: AKS networking configuration for vnet.
+ :type aks_networking_configuration:
+ ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration
+ :param load_balancer_type: Load Balancer Type. Possible values include: "PublicIp",
+ "InternalLoadBalancer". Default value: "PublicIp".
+ :type load_balancer_type: str or ~azure_machine_learning_workspaces.models.LoadBalancerType
+ :param load_balancer_subnet: Load Balancer Subnet.
+ :type load_balancer_subnet: str
+ """
+
+ _validation = {
+ 'system_services': {'readonly': True},
+ 'agent_count': {'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
+ 'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
+ 'agent_count': {'key': 'agentCount', 'type': 'int'},
+ 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'},
+ 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
+ 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'},
+ 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'},
+ 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'},
+ 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ cluster_fqdn: Optional[str] = None,
+ agent_count: Optional[int] = None,
+ agent_vm_size: Optional[str] = None,
+ cluster_purpose: Optional[Union[str, "ClusterPurpose"]] = "FastProd",
+ ssl_configuration: Optional["SslConfiguration"] = None,
+ aks_networking_configuration: Optional["AksNetworkingConfiguration"] = None,
+ load_balancer_type: Optional[Union[str, "LoadBalancerType"]] = "PublicIp",
+ load_balancer_subnet: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksProperties, self).__init__(**kwargs)
+ self.cluster_fqdn = cluster_fqdn
+ self.system_services = None
+ self.agent_count = agent_count
+ self.agent_vm_size = agent_vm_size
+ self.cluster_purpose = cluster_purpose
+ self.ssl_configuration = ssl_configuration
+ self.aks_networking_configuration = aks_networking_configuration
+ self.load_balancer_type = load_balancer_type
+ self.load_balancer_subnet = load_balancer_subnet
+
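+# Illustrative sketch (placeholder values, not generated code): attaching an existing AKS
+# cluster combines the models above, roughly:
+#
+#   aks_properties = AksProperties(
+#       agent_count=3,
+#       agent_vm_size="Standard_D3_v2",
+#       cluster_purpose="DevTest",
+#       aks_networking_configuration=networking,  # see the networking sketch above
+#   )
+#   aks_compute = Aks(
+#       description="Attached AKS cluster",
+#       resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers"
+#                   "/Microsoft.ContainerService/managedClusters/<cluster>",
+#       properties=aks_properties,
+#   )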
+
+class AmlCompute(Compute):
+    """Properties (top level) of AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: Properties of AmlCompute.
+ :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["AmlComputeProperties"] = None,
+ **kwargs
+ ):
+ super(AmlCompute, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.properties = properties
+
+
+class AmlComputeNodeInformation(msrest.serialization.Model):
+    """Compute node information related to an AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar node_id: ID of the compute node.
+ :vartype node_id: str
+ :ivar private_ip_address: Private IP address of the compute node.
+ :vartype private_ip_address: str
+ :ivar public_ip_address: Public IP address of the compute node.
+ :vartype public_ip_address: str
+ :ivar port: SSH port number of the node.
+ :vartype port: int
+ :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable,
+ leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable",
+ "leaving", "preempted".
+ :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState
+    :ivar run_id: ID of the Experiment running on the node, if any; otherwise null.
+ :vartype run_id: str
+ """
+
+ _validation = {
+ 'node_id': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'port': {'readonly': True},
+ 'node_state': {'readonly': True},
+ 'run_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'node_id': {'key': 'nodeId', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'port': {'key': 'port', 'type': 'int'},
+ 'node_state': {'key': 'nodeState', 'type': 'str'},
+ 'run_id': {'key': 'runId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodeInformation, self).__init__(**kwargs)
+ self.node_id = None
+ self.private_ip_address = None
+ self.public_ip_address = None
+ self.port = None
+ self.node_state = None
+ self.run_id = None
+
+
+class AmlComputeNodesInformation(msrest.serialization.Model):
+ """Result of AmlCompute Nodes.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar nodes: The collection of returned AmlCompute nodes details.
+ :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation]
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'nodes': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodesInformation, self).__init__(**kwargs)
+ self.nodes = None
+ self.next_link = None
+
+
+class AmlComputeProperties(msrest.serialization.Model):
+ """AML Compute properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value:
+ "Linux".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.OsType
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated",
+ "LowPriority".
+ :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority
+ :param virtual_machine_image: Virtual Machine image for AML Compute - windows only.
+ :type virtual_machine_image: ~azure_machine_learning_workspaces.models.VirtualMachineImage
+ :param isolated_network: Network is isolated or not.
+ :type isolated_network: bool
+ :param scale_settings: Scale settings for AML Compute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ :param user_account_credentials: Credentials for an administrator user account that will be
+ created on each compute node.
+ :type user_account_credentials:
+ ~azure_machine_learning_workspaces.models.UserAccountCredentials
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param remote_login_port_public_access: State of the public SSH port. Possible values are:
+ Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled -
+ Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified -
+ Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined,
+     else it is open on all public nodes. It can be default only during cluster creation time;
+     after creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled",
+ "NotSpecified". Default value: "NotSpecified".
+ :type remote_login_port_public_access: str or
+ ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess
+ :ivar allocation_state: Allocation state of the compute. Possible values are: steady -
+ Indicates that the compute is not resizing. There are no changes to the number of compute nodes
+ in the compute in progress. A compute enters this state when it is created and when no
+ operations are being performed on the compute to change the number of compute nodes. resizing -
+ Indicates that the compute is resizing; that is, compute nodes are being added to or removed
+ from the compute. Possible values include: "Steady", "Resizing".
+ :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState
+ :ivar allocation_state_transition_time: The time at which the compute entered its current
+ allocation state.
+ :vartype allocation_state_transition_time: ~datetime.datetime
+ :ivar errors: Collection of errors encountered by various compute nodes during node setup.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar current_node_count: The number of compute nodes currently assigned to the compute.
+ :vartype current_node_count: int
+ :ivar target_node_count: The target number of compute nodes for the compute. If the
+ allocationState is resizing, this property denotes the target node count for the ongoing resize
+ operation. If the allocationState is steady, this property denotes the target node count for
+ the previous resize operation.
+ :vartype target_node_count: int
+ :ivar node_state_counts: Counts of various node states on the compute.
+ :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts
+    :param enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
+     values are: true - Indicates that the compute nodes will have public IPs provisioned. false -
+     Indicates that the compute nodes will have a private endpoint and no public IPs.
+ :type enable_node_public_ip: bool
+ """
+
+ _validation = {
+ 'allocation_state': {'readonly': True},
+ 'allocation_state_transition_time': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'current_node_count': {'readonly': True},
+ 'target_node_count': {'readonly': True},
+ 'node_state_counts': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'vm_priority': {'key': 'vmPriority', 'type': 'str'},
+ 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'},
+ 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+ 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'},
+ 'allocation_state': {'key': 'allocationState', 'type': 'str'},
+ 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
+ 'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
+ 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
+ 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
+ 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'},
+ 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ os_type: Optional[Union[str, "OsType"]] = "Linux",
+ vm_size: Optional[str] = None,
+ vm_priority: Optional[Union[str, "VmPriority"]] = None,
+ virtual_machine_image: Optional["VirtualMachineImage"] = None,
+ isolated_network: Optional[bool] = None,
+ scale_settings: Optional["ScaleSettings"] = None,
+ user_account_credentials: Optional["UserAccountCredentials"] = None,
+ subnet: Optional["ResourceId"] = None,
+ remote_login_port_public_access: Optional[Union[str, "RemoteLoginPortPublicAccess"]] = "NotSpecified",
+ enable_node_public_ip: Optional[bool] = True,
+ **kwargs
+ ):
+ super(AmlComputeProperties, self).__init__(**kwargs)
+ self.os_type = os_type
+ self.vm_size = vm_size
+ self.vm_priority = vm_priority
+ self.virtual_machine_image = virtual_machine_image
+ self.isolated_network = isolated_network
+ self.scale_settings = scale_settings
+ self.user_account_credentials = user_account_credentials
+ self.subnet = subnet
+ self.remote_login_port_public_access = remote_login_port_public_access
+ self.allocation_state = None
+ self.allocation_state_transition_time = None
+ self.errors = None
+ self.current_node_count = None
+ self.target_node_count = None
+ self.node_state_counts = None
+ self.enable_node_public_ip = enable_node_public_ip
+
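+# Illustrative sketch (placeholder values, not generated code): a minimal AmlCompute definition
+# built from the models above; in practice ``scale_settings`` (a ``ScaleSettings`` defined
+# elsewhere in this module) would normally be supplied as well.
+#
+#   aml_properties = AmlComputeProperties(
+#       vm_size="STANDARD_DS3_V2",
+#       vm_priority="Dedicated",
+#       remote_login_port_public_access="Disabled",
+#       enable_node_public_ip=False,
+#   )
+#   aml_compute = AmlCompute(description="Training cluster", properties=aml_properties)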
+
+class AmlUserFeature(msrest.serialization.Model):
+ """Features enabled for a workspace.
+
+ :param id: Specifies the feature ID.
+ :type id: str
+ :param display_name: Specifies the feature name.
+ :type display_name: str
+ :param description: Describes the feature for user experience.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: Optional[str] = None,
+ display_name: Optional[str] = None,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(AmlUserFeature, self).__init__(**kwargs)
+ self.id = id
+ self.display_name = display_name
+ self.description = description
+
+
+class AssignedUser(msrest.serialization.Model):
+ """A user that can be assigned to a compute instance.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param object_id: Required. User’s AAD Object Id.
+ :type object_id: str
+ :param tenant_id: Required. User’s AAD Tenant Id.
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'object_id': {'required': True},
+ 'tenant_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'object_id': {'key': 'objectId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ object_id: str,
+ tenant_id: str,
+ **kwargs
+ ):
+ super(AssignedUser, self).__init__(**kwargs)
+ self.object_id = object_id
+ self.tenant_id = tenant_id
+
+
+class AutoPauseProperties(msrest.serialization.Model):
+ """Auto pause properties.
+
+    :param delay_in_minutes: Auto pause delay in minutes.
+    :type delay_in_minutes: int
+    :param enabled: Whether auto pause is enabled.
+    :type enabled: bool
+ """
+
+ _attribute_map = {
+ 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ delay_in_minutes: Optional[int] = None,
+ enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AutoPauseProperties, self).__init__(**kwargs)
+ self.delay_in_minutes = delay_in_minutes
+ self.enabled = enabled
+
+
+class AutoScaleProperties(msrest.serialization.Model):
+ """Auto scale properties.
+
+    :param min_node_count: Minimum number of nodes for auto scale.
+    :type min_node_count: int
+    :param enabled: Whether auto scale is enabled.
+    :type enabled: bool
+    :param max_node_count: Maximum number of nodes for auto scale.
+    :type max_node_count: int
+ """
+
+ _attribute_map = {
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ min_node_count: Optional[int] = None,
+ enabled: Optional[bool] = None,
+ max_node_count: Optional[int] = None,
+ **kwargs
+ ):
+ super(AutoScaleProperties, self).__init__(**kwargs)
+ self.min_node_count = min_node_count
+ self.enabled = enabled
+ self.max_node_count = max_node_count
+
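+# Illustrative sketch (placeholder values, not generated code): the auto pause / auto scale
+# models above are plain property bags, e.g. when attaching a Synapse Spark pool:
+#
+#   auto_pause = AutoPauseProperties(delay_in_minutes=15, enabled=True)
+#   auto_scale = AutoScaleProperties(min_node_count=3, max_node_count=10, enabled=True)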
+
+class ClusterUpdateParameters(msrest.serialization.Model):
+ """AmlCompute update parameters.
+
+ :param properties: Properties of ClusterUpdate.
+ :type properties: ~azure_machine_learning_workspaces.models.ScaleSettingsInformation
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties.properties', 'type': 'ScaleSettingsInformation'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["ScaleSettingsInformation"] = None,
+ **kwargs
+ ):
+ super(ClusterUpdateParameters, self).__init__(**kwargs)
+ self.properties = properties
+
+
+class Components1D3SwueSchemasComputeresourceAllof1(msrest.serialization.Model):
+ """Components1D3SwueSchemasComputeresourceAllof1.
+
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["Compute"] = None,
+ **kwargs
+ ):
+ super(Components1D3SwueSchemasComputeresourceAllof1, self).__init__(**kwargs)
+ self.properties = properties
+
+
+class ComputeInstance(Compute):
+    """Properties (top level) of ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: Properties of ComputeInstance.
+ :type properties: ~azure_machine_learning_workspaces.models.ComputeInstanceProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["ComputeInstanceProperties"] = None,
+ **kwargs
+ ):
+ super(ComputeInstance, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'ComputeInstance' # type: str
+ self.properties = properties
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+    :param endpoint_uri: Application's endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ display_name: Optional[str] = None,
+ endpoint_uri: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = display_name
+ self.endpoint_uri = endpoint_uri
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+    """Defines all connectivity endpoints and properties for a ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+ which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+    """Describes information on the user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_name: Name of the user.
+ :vartype user_name: str
+    :ivar user_org_id: Uniquely identifies the user's Azure Active Directory organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceLastOperation(msrest.serialization.Model):
+ """The last operation on ComputeInstance.
+
+ :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
+ "Stop", "Restart", "Reimage", "Delete".
+ :type operation_name: str or ~azure_machine_learning_workspaces.models.OperationName
+ :param operation_time: Time of the last operation.
+ :type operation_time: ~datetime.datetime
+ :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
+ "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
+ :type operation_status: str or ~azure_machine_learning_workspaces.models.OperationStatus
+ """
+
+ _attribute_map = {
+ 'operation_name': {'key': 'operationName', 'type': 'str'},
+ 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+ 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ operation_name: Optional[Union[str, "OperationName"]] = None,
+ operation_time: Optional[datetime.datetime] = None,
+ operation_status: Optional[Union[str, "OperationStatus"]] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+ self.operation_name = operation_name
+ self.operation_time = operation_time
+ self.operation_status = operation_status
+
+
+class ComputeInstanceProperties(msrest.serialization.Model):
+ """Compute Instance properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param application_sharing_policy: Policy for sharing applications on this compute instance
+ among users of parent workspace. If Personal, only the creator can access applications on this
+ compute instance. When Shared, any workspace user can access applications on this instance
+ depending on his/her assigned role. Possible values include: "Personal", "Shared". Default
+ value: "Shared".
+ :type application_sharing_policy: str or
+ ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy
+ :param ssh_settings: Specifies policy and settings for SSH access.
+ :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings
+ :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+ ComputeInstance.
+ :vartype connectivity_endpoints:
+ ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints
+ :ivar applications: Describes available applications and their endpoints on this
+ ComputeInstance.
+ :vartype applications:
+ list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication]
+    :ivar created_by: Describes information on the user who created this ComputeInstance.
+ :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy
+ :ivar errors: Collection of errors encountered on this ComputeInstance.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar state: The current state of this ComputeInstance. Possible values include: "Creating",
+ "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+ "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState
+ :param compute_instance_authorization_type: The Compute Instance Authorization type. Available
+ values are personal (default). Possible values include: "personal". Default value: "personal".
+ :type compute_instance_authorization_type: str or
+ ~azure_machine_learning_workspaces.models.ComputeInstanceAuthorizationType
+ :param personal_compute_instance_settings: Settings for a personal compute instance.
+ :type personal_compute_instance_settings:
+ ~azure_machine_learning_workspaces.models.PersonalComputeInstanceSettings
+ :param setup_scripts: Details of customized scripts to execute for setting up the cluster.
+ :type setup_scripts: ~azure_machine_learning_workspaces.models.SetupScripts
+ :ivar last_operation: The last operation on ComputeInstance.
+ :vartype last_operation: ~azure_machine_learning_workspaces.models.ComputeInstanceLastOperation
+ """
+
+ _validation = {
+ 'connectivity_endpoints': {'readonly': True},
+ 'applications': {'readonly': True},
+ 'created_by': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'state': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+ 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+ 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+ 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+ 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+ 'errors': {'key': 'errors', 'type': '[ErrorResponse]'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+ 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+ 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'},
+ 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vm_size: Optional[str] = None,
+ subnet: Optional["ResourceId"] = None,
+ application_sharing_policy: Optional[Union[str, "ApplicationSharingPolicy"]] = "Shared",
+ ssh_settings: Optional["ComputeInstanceSshSettings"] = None,
+ compute_instance_authorization_type: Optional[Union[str, "ComputeInstanceAuthorizationType"]] = "personal",
+ personal_compute_instance_settings: Optional["PersonalComputeInstanceSettings"] = None,
+ setup_scripts: Optional["SetupScripts"] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceProperties, self).__init__(**kwargs)
+ self.vm_size = vm_size
+ self.subnet = subnet
+ self.application_sharing_policy = application_sharing_policy
+ self.ssh_settings = ssh_settings
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.compute_instance_authorization_type = compute_instance_authorization_type
+ self.personal_compute_instance_settings = personal_compute_instance_settings
+ self.setup_scripts = setup_scripts
+ self.last_operation = None
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable.
+ Possible values include: "Enabled", "Disabled". Default value: "Disabled".
+ :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess
+ :ivar admin_user_name: Describes the admin user name.
+ :vartype admin_user_name: str
+ :ivar ssh_port: Describes the port for connecting through SSH.
+ :vartype ssh_port: int
+ :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t
+ rsa -b 2048" to generate your SSH key pairs.
+ :type admin_public_key: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'readonly': True},
+ 'ssh_port': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'},
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ ssh_public_access: Optional[Union[str, "SshPublicAccess"]] = "Disabled",
+ admin_public_key: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceSshSettings, self).__init__(**kwargs)
+ self.ssh_public_access = ssh_public_access
+ self.admin_user_name = None
+ self.ssh_port = None
+ self.admin_public_key = admin_public_key
+
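+# Illustrative sketch (placeholder values, not generated code): a compute instance definition
+# combining the models above:
+#
+#   ssh = ComputeInstanceSshSettings(ssh_public_access="Enabled",
+#                                    admin_public_key="ssh-rsa AAAA... user@host")
+#   ci_properties = ComputeInstanceProperties(vm_size="STANDARD_DS3_V2",
+#                                             application_sharing_policy="Personal",
+#                                             ssh_settings=ssh)
+#   compute_instance = ComputeInstance(properties=ci_properties)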
+
+class Resource(msrest.serialization.Model):
+ """Common fields that are returned in the response for all Azure Resource Manager resources.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Resource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+
+
+class ComputeResource(Resource, Components1D3SwueSchemasComputeresourceAllof1):
+ """Machine Learning compute object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: System data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["Compute"] = None,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ **kwargs
+ ):
+        super(ComputeResource, self).__init__(properties=properties, **kwargs)
+        self.properties = properties
+        self.identity = identity
+        self.location = location
+        self.tags = tags
+        self.sku = sku
+        self.system_data = None
+        self.id = None
+        self.name = None
+        self.type = None
+
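+# Illustrative sketch (placeholder values, not generated code): any ``Compute`` subclass is
+# wrapped in a ``ComputeResource`` ARM envelope before being sent to the service, roughly:
+#
+#   resource = ComputeResource(location="eastus2",
+#                              tags={"team": "ml-platform"},
+#                              properties=aml_compute)  # see the AmlCompute sketch above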
+
+class ContainerResourceRequirements(msrest.serialization.Model):
+ """The resource requirements for the container (cpu and memory).
+
+ :param cpu: The minimum amount of CPU cores to be used by the container. More info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type cpu: float
+ :param cpu_limit: The maximum amount of CPU cores allowed to be used by the container. More
+ info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type cpu_limit: float
+ :param memory_in_gb: The minimum amount of memory (in GB) to be used by the container. More
+ info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type memory_in_gb: float
+ :param memory_in_gb_limit: The maximum amount of memory (in GB) allowed to be used by the
+ container. More info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type memory_in_gb_limit: float
+ :param gpu: The number of GPU cores in the container.
+ :type gpu: int
+ :param fpga: The number of FPGA PCIE devices exposed to the container. Must be multiple of 2.
+ :type fpga: int
+ """
+
+ _attribute_map = {
+ 'cpu': {'key': 'cpu', 'type': 'float'},
+ 'cpu_limit': {'key': 'cpuLimit', 'type': 'float'},
+ 'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
+ 'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'},
+ 'gpu': {'key': 'gpu', 'type': 'int'},
+ 'fpga': {'key': 'fpga', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ cpu: Optional[float] = None,
+ cpu_limit: Optional[float] = None,
+ memory_in_gb: Optional[float] = None,
+ memory_in_gb_limit: Optional[float] = None,
+ gpu: Optional[int] = None,
+ fpga: Optional[int] = None,
+ **kwargs
+ ):
+ super(ContainerResourceRequirements, self).__init__(**kwargs)
+ self.cpu = cpu
+ self.cpu_limit = cpu_limit
+ self.memory_in_gb = memory_in_gb
+ self.memory_in_gb_limit = memory_in_gb_limit
+ self.gpu = gpu
+ self.fpga = fpga
+
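+# Illustrative sketch (placeholder values, not generated code): requests vs. limits for a
+# scoring container, mirroring the Kubernetes semantics referenced above:
+#
+#   requirements = ContainerResourceRequirements(cpu=0.5, cpu_limit=1.0,
+#                                                memory_in_gb=1.0, memory_in_gb_limit=2.0)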
+
+class CosmosDbSettings(msrest.serialization.Model):
+ """CosmosDbSettings.
+
+ :param collections_throughput: The throughput of the collections in cosmosdb database.
+ :type collections_throughput: int
+ """
+
+ _attribute_map = {
+ 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ collections_throughput: Optional[int] = None,
+ **kwargs
+ ):
+ super(CosmosDbSettings, self).__init__(**kwargs)
+ self.collections_throughput = collections_throughput
+
+
+class Databricks(Compute):
+    """A Databricks compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: Properties of Databricks.
+ :type properties: ~azure_machine_learning_workspaces.models.DatabricksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DatabricksProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["DatabricksProperties"] = None,
+ **kwargs
+ ):
+ super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.properties = properties
+
+
+class DatabricksComputeSecretsProperties(msrest.serialization.Model):
+ """Properties of Databricks Compute Secrets.
+
+    :param databricks_access_token: Access token for the Databricks account.
+ :type databricks_access_token: str
+ """
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ databricks_access_token: Optional[str] = None,
+ **kwargs
+ ):
+ super(DatabricksComputeSecretsProperties, self).__init__(**kwargs)
+ self.databricks_access_token = databricks_access_token
+
+
+class DatabricksComputeSecrets(ComputeSecrets, DatabricksComputeSecretsProperties):
+ """Secrets related to a Machine Learning compute based on Databricks.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param databricks_access_token: Access token for the Databricks account.
+ :type databricks_access_token: str
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ databricks_access_token: Optional[str] = None,
+ **kwargs
+ ):
+ super(DatabricksComputeSecrets, self).__init__(databricks_access_token=databricks_access_token, **kwargs)
+ self.databricks_access_token = databricks_access_token
+        self.compute_type = 'Databricks'  # type: str
+
+
+class DatabricksProperties(msrest.serialization.Model):
+ """Properties of Databricks.
+
+ :param databricks_access_token: Databricks access token.
+ :type databricks_access_token: str
+    :param workspace_url: Workspace URL.
+ :type workspace_url: str
+ """
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ databricks_access_token: Optional[str] = None,
+ workspace_url: Optional[str] = None,
+ **kwargs
+ ):
+ super(DatabricksProperties, self).__init__(**kwargs)
+ self.databricks_access_token = databricks_access_token
+ self.workspace_url = workspace_url
+
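+# Illustrative sketch (placeholder values, not generated code): attaching a Databricks
+# workspace combines the two models above:
+#
+#   databricks_compute = Databricks(
+#       description="Attached Databricks workspace",
+#       properties=DatabricksProperties(
+#           databricks_access_token="<token>",
+#           workspace_url="https://<workspace>.azuredatabricks.net",
+#       ),
+#   )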
+
+class DataFactory(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+    :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+     brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ **kwargs
+ ):
+ super(DataFactory, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'DataFactory' # type: str
+
+
+class DataLakeAnalytics(Compute):
+ """A DataLakeAnalytics compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+    :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+     brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DataLakeAnalyticsProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["DataLakeAnalyticsProperties"] = None,
+ **kwargs
+ ):
+ super(DataLakeAnalytics, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'DataLakeAnalytics' # type: str
+ self.properties = properties
+
+
+class DataLakeAnalyticsProperties(msrest.serialization.Model):
+ """DataLakeAnalyticsProperties.
+
+ :param data_lake_store_account_name: DataLake Store Account Name.
+ :type data_lake_store_account_name: str
+ """
+
+ _attribute_map = {
+ 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ data_lake_store_account_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(DataLakeAnalyticsProperties, self).__init__(**kwargs)
+ self.data_lake_store_account_name = data_lake_store_account_name
+
+
+class DiagnoseRequestProperties(msrest.serialization.Model):
+ """DiagnoseRequestProperties.
+
+ :param udr: Setting for diagnosing user defined routing.
+ :type udr: dict[str, object]
+ :param nsg: Setting for diagnosing network security group.
+ :type nsg: dict[str, object]
+ :param resource_lock: Setting for diagnosing resource lock.
+ :type resource_lock: dict[str, object]
+ :param dns_resolution: Setting for diagnosing dns resolution.
+ :type dns_resolution: dict[str, object]
+ :param storage_account: Setting for diagnosing dependent storage account.
+ :type storage_account: dict[str, object]
+ :param key_vault: Setting for diagnosing dependent key vault.
+ :type key_vault: dict[str, object]
+ :param container_registry: Setting for diagnosing dependent container registry.
+ :type container_registry: dict[str, object]
+ :param application_insights: Setting for diagnosing dependent application insights.
+ :type application_insights: dict[str, object]
+ :param others: Setting for diagnosing unclassified category of problems.
+ :type others: dict[str, object]
+ """
+
+ _attribute_map = {
+ 'udr': {'key': 'udr', 'type': '{object}'},
+ 'nsg': {'key': 'nsg', 'type': '{object}'},
+ 'resource_lock': {'key': 'resourceLock', 'type': '{object}'},
+ 'dns_resolution': {'key': 'dnsResolution', 'type': '{object}'},
+ 'storage_account': {'key': 'storageAccount', 'type': '{object}'},
+ 'key_vault': {'key': 'keyVault', 'type': '{object}'},
+ 'container_registry': {'key': 'containerRegistry', 'type': '{object}'},
+ 'application_insights': {'key': 'applicationInsights', 'type': '{object}'},
+ 'others': {'key': 'others', 'type': '{object}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ udr: Optional[Dict[str, object]] = None,
+ nsg: Optional[Dict[str, object]] = None,
+ resource_lock: Optional[Dict[str, object]] = None,
+ dns_resolution: Optional[Dict[str, object]] = None,
+ storage_account: Optional[Dict[str, object]] = None,
+ key_vault: Optional[Dict[str, object]] = None,
+ container_registry: Optional[Dict[str, object]] = None,
+ application_insights: Optional[Dict[str, object]] = None,
+ others: Optional[Dict[str, object]] = None,
+ **kwargs
+ ):
+ super(DiagnoseRequestProperties, self).__init__(**kwargs)
+ self.udr = udr
+ self.nsg = nsg
+ self.resource_lock = resource_lock
+ self.dns_resolution = dns_resolution
+ self.storage_account = storage_account
+ self.key_vault = key_vault
+ self.container_registry = container_registry
+ self.application_insights = application_insights
+ self.others = others
+
+
+class DiagnoseResponseResult(msrest.serialization.Model):
+ """DiagnoseResponseResult.
+
+ :param value:
+ :type value: ~azure_machine_learning_workspaces.models.DiagnoseResponseResultValue
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'DiagnoseResponseResultValue'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional["DiagnoseResponseResultValue"] = None,
+ **kwargs
+ ):
+ super(DiagnoseResponseResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class DiagnoseResponseResultValue(msrest.serialization.Model):
+ """DiagnoseResponseResultValue.
+
+ :param user_defined_route_results:
+ :type user_defined_route_results:
+ list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param network_security_rule_results:
+ :type network_security_rule_results:
+ list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param resource_lock_results:
+ :type resource_lock_results: list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param dns_resolution_results:
+ :type dns_resolution_results: list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param storage_account_results:
+ :type storage_account_results: list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param key_vault_results:
+ :type key_vault_results: list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param container_registry_results:
+ :type container_registry_results:
+ list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param application_insights_results:
+ :type application_insights_results:
+ list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ :param other_results:
+ :type other_results: list[~azure_machine_learning_workspaces.models.DiagnoseResult]
+ """
+
+ _attribute_map = {
+ 'user_defined_route_results': {'key': 'userDefinedRouteResults', 'type': '[DiagnoseResult]'},
+ 'network_security_rule_results': {'key': 'networkSecurityRuleResults', 'type': '[DiagnoseResult]'},
+ 'resource_lock_results': {'key': 'resourceLockResults', 'type': '[DiagnoseResult]'},
+ 'dns_resolution_results': {'key': 'dnsResolutionResults', 'type': '[DiagnoseResult]'},
+ 'storage_account_results': {'key': 'storageAccountResults', 'type': '[DiagnoseResult]'},
+ 'key_vault_results': {'key': 'keyVaultResults', 'type': '[DiagnoseResult]'},
+ 'container_registry_results': {'key': 'containerRegistryResults', 'type': '[DiagnoseResult]'},
+ 'application_insights_results': {'key': 'applicationInsightsResults', 'type': '[DiagnoseResult]'},
+ 'other_results': {'key': 'otherResults', 'type': '[DiagnoseResult]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ user_defined_route_results: Optional[List["DiagnoseResult"]] = None,
+ network_security_rule_results: Optional[List["DiagnoseResult"]] = None,
+ resource_lock_results: Optional[List["DiagnoseResult"]] = None,
+ dns_resolution_results: Optional[List["DiagnoseResult"]] = None,
+ storage_account_results: Optional[List["DiagnoseResult"]] = None,
+ key_vault_results: Optional[List["DiagnoseResult"]] = None,
+ container_registry_results: Optional[List["DiagnoseResult"]] = None,
+ application_insights_results: Optional[List["DiagnoseResult"]] = None,
+ other_results: Optional[List["DiagnoseResult"]] = None,
+ **kwargs
+ ):
+ super(DiagnoseResponseResultValue, self).__init__(**kwargs)
+ self.user_defined_route_results = user_defined_route_results
+ self.network_security_rule_results = network_security_rule_results
+ self.resource_lock_results = resource_lock_results
+ self.dns_resolution_results = dns_resolution_results
+ self.storage_account_results = storage_account_results
+ self.key_vault_results = key_vault_results
+ self.container_registry_results = container_registry_results
+ self.application_insights_results = application_insights_results
+ self.other_results = other_results
+
+
+class DiagnoseResult(msrest.serialization.Model):
+ """Result of Diagnose.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: Code for workspace setup error.
+ :vartype code: str
+ :ivar level: Level of workspace setup error. Possible values include: "Warning", "Error",
+ "Information".
+ :vartype level: str or ~azure_machine_learning_workspaces.models.DiagnoseResultLevel
+ :ivar message: Message of workspace setup error.
+ :vartype message: str
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'level': {'readonly': True},
+ 'message': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'level': {'key': 'level', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DiagnoseResult, self).__init__(**kwargs)
+ self.code = None
+ self.level = None
+ self.message = None
+
+
+class DiagnoseWorkspaceParameters(msrest.serialization.Model):
+ """Parameters to diagnose a workspace.
+
+ :param value: Value of Parameters.
+ :type value: ~azure_machine_learning_workspaces.models.DiagnoseRequestProperties
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'DiagnoseRequestProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional["DiagnoseRequestProperties"] = None,
+ **kwargs
+ ):
+ super(DiagnoseWorkspaceParameters, self).__init__(**kwargs)
+ self.value = value
+
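+
+# Illustrative sketch only (not part of the generated models): building the body
+# for a workspace diagnose request from the models above. The empty dicts are
+# placeholders for per-category settings; the helper name is hypothetical.
+def _example_diagnose_parameters():
+    return DiagnoseWorkspaceParameters(
+        value=DiagnoseRequestProperties(
+            udr={},
+            nsg={},
+            dns_resolution={},
+            storage_account={},
+            key_vault={},
+            container_registry={},
+            application_insights={},
+            others={},
+        )
+    )
+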
+
+class EncryptionProperty(msrest.serialization.Model):
+ """EncryptionProperty.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param status: Required. Indicates whether or not the encryption is enabled for the workspace.
+ Possible values include: "Enabled", "Disabled".
+ :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
+ :param identity: The identity that will be used to access the key vault for encryption at rest.
+ :type identity: ~azure_machine_learning_workspaces.models.IdentityForCmk
+ :param key_vault_properties: Required. Customer Key vault properties.
+ :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties
+ """
+
+ _validation = {
+ 'status': {'required': True},
+ 'key_vault_properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'IdentityForCmk'},
+ 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Union[str, "EncryptionStatus"],
+ key_vault_properties: "KeyVaultProperties",
+ identity: Optional["IdentityForCmk"] = None,
+ **kwargs
+ ):
+ super(EncryptionProperty, self).__init__(**kwargs)
+ self.status = status
+ self.identity = identity
+ self.key_vault_properties = key_vault_properties
+
+
+class ErrorAdditionalInfo(msrest.serialization.Model):
+ """The resource management error additional info.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar type: The additional info type.
+ :vartype type: str
+ :ivar info: The additional info.
+ :vartype info: object
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'info': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'info': {'key': 'info', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorAdditionalInfo, self).__init__(**kwargs)
+ self.type = None
+ self.info = None
+
+
+class ErrorDetail(msrest.serialization.Model):
+ """The error detail.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: The error code.
+ :vartype code: str
+ :ivar message: The error message.
+ :vartype message: str
+ :ivar target: The error target.
+ :vartype target: str
+ :ivar details: The error details.
+ :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+ :ivar additional_info: The error additional info.
+ :vartype additional_info: list[~azure_machine_learning_workspaces.models.ErrorAdditionalInfo]
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ 'target': {'readonly': True},
+ 'details': {'readonly': True},
+ 'additional_info': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'target': {'key': 'target', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[ErrorDetail]'},
+ 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorDetail, self).__init__(**kwargs)
+ self.code = None
+ self.message = None
+ self.target = None
+ self.details = None
+ self.additional_info = None
+
+
+class ErrorResponse(msrest.serialization.Model):
+    """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.)
+
+ :param error: The error object.
+ :type error: ~azure_machine_learning_workspaces.models.ErrorDetail
+ """
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorDetail'},
+ }
+
+ def __init__(
+ self,
+ *,
+ error: Optional["ErrorDetail"] = None,
+ **kwargs
+ ):
+ super(ErrorResponse, self).__init__(**kwargs)
+ self.error = error
+
+
+class EstimatedVmPrice(msrest.serialization.Model):
+ """The estimated price info for using a VM of a particular OS type, tier, etc.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param retail_price: Required. The price charged for using the VM.
+ :type retail_price: float
+ :param os_type: Required. Operating system type used by the VM. Possible values include:
+ "Linux", "Windows".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.VmPriceOsType
+ :param vm_tier: Required. The type of the VM. Possible values include: "Standard",
+ "LowPriority", "Spot".
+ :type vm_tier: str or ~azure_machine_learning_workspaces.models.VmTier
+ """
+
+ _validation = {
+ 'retail_price': {'required': True},
+ 'os_type': {'required': True},
+ 'vm_tier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'retail_price': {'key': 'retailPrice', 'type': 'float'},
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_tier': {'key': 'vmTier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ retail_price: float,
+ os_type: Union[str, "VmPriceOsType"],
+ vm_tier: Union[str, "VmTier"],
+ **kwargs
+ ):
+ super(EstimatedVmPrice, self).__init__(**kwargs)
+ self.retail_price = retail_price
+ self.os_type = os_type
+ self.vm_tier = vm_tier
+
+
+class EstimatedVmPrices(msrest.serialization.Model):
+ """The estimated price info for using a VM.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param billing_currency: Required. Three-letter code specifying the currency of the VM price.
+ Example: USD. Possible values include: "USD".
+ :type billing_currency: str or ~azure_machine_learning_workspaces.models.BillingCurrency
+ :param unit_of_measure: Required. The unit of time measurement for the specified VM price.
+ Example: OneHour. Possible values include: "OneHour".
+ :type unit_of_measure: str or ~azure_machine_learning_workspaces.models.UnitOfMeasure
+ :param values: Required. The list of estimated prices for using a VM of a particular OS type,
+ tier, etc.
+ :type values: list[~azure_machine_learning_workspaces.models.EstimatedVmPrice]
+ """
+
+ _validation = {
+ 'billing_currency': {'required': True},
+ 'unit_of_measure': {'required': True},
+ 'values': {'required': True},
+ }
+
+ _attribute_map = {
+ 'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
+ 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[EstimatedVmPrice]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ billing_currency: Union[str, "BillingCurrency"],
+ unit_of_measure: Union[str, "UnitOfMeasure"],
+ values: List["EstimatedVmPrice"],
+ **kwargs
+ ):
+ super(EstimatedVmPrices, self).__init__(**kwargs)
+ self.billing_currency = billing_currency
+ self.unit_of_measure = unit_of_measure
+ self.values = values
+
+
+class ExternalFqdnResponse(msrest.serialization.Model):
+ """ExternalFqdnResponse.
+
+ :param value:
+ :type value: list[~azure_machine_learning_workspaces.models.FqdnEndpoints]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[FqdnEndpoints]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["FqdnEndpoints"]] = None,
+ **kwargs
+ ):
+ super(ExternalFqdnResponse, self).__init__(**kwargs)
+ self.value = value
+
+
+class FqdnEndpoint(msrest.serialization.Model):
+ """FqdnEndpoint.
+
+ :param domain_name:
+ :type domain_name: str
+ :param endpoint_details:
+ :type endpoint_details: list[~azure_machine_learning_workspaces.models.FqdnEndpointDetail]
+ """
+
+ _attribute_map = {
+ 'domain_name': {'key': 'domainName', 'type': 'str'},
+ 'endpoint_details': {'key': 'endpointDetails', 'type': '[FqdnEndpointDetail]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ domain_name: Optional[str] = None,
+ endpoint_details: Optional[List["FqdnEndpointDetail"]] = None,
+ **kwargs
+ ):
+ super(FqdnEndpoint, self).__init__(**kwargs)
+ self.domain_name = domain_name
+ self.endpoint_details = endpoint_details
+
+
+class FqdnEndpointDetail(msrest.serialization.Model):
+ """FqdnEndpointDetail.
+
+ :param port:
+ :type port: int
+ """
+
+ _attribute_map = {
+ 'port': {'key': 'port', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ port: Optional[int] = None,
+ **kwargs
+ ):
+ super(FqdnEndpointDetail, self).__init__(**kwargs)
+ self.port = port
+
+
+class FqdnEndpoints(msrest.serialization.Model):
+ """FqdnEndpoints.
+
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.FqdnEndpointsProperties
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'FqdnEndpointsProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["FqdnEndpointsProperties"] = None,
+ **kwargs
+ ):
+ super(FqdnEndpoints, self).__init__(**kwargs)
+ self.properties = properties
+
+
+class FqdnEndpointsProperties(msrest.serialization.Model):
+ """FqdnEndpointsProperties.
+
+ :param category:
+ :type category: str
+ :param endpoints:
+ :type endpoints: list[~azure_machine_learning_workspaces.models.FqdnEndpoint]
+ """
+
+ _attribute_map = {
+ 'category': {'key': 'category', 'type': 'str'},
+ 'endpoints': {'key': 'endpoints', 'type': '[FqdnEndpoint]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ category: Optional[str] = None,
+ endpoints: Optional[List["FqdnEndpoint"]] = None,
+ **kwargs
+ ):
+ super(FqdnEndpointsProperties, self).__init__(**kwargs)
+ self.category = category
+ self.endpoints = endpoints
+
+
+class HdInsight(Compute):
+    """An HDInsight compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+    :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+     brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: HDInsight compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.HdInsightProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'HdInsightProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["HdInsightProperties"] = None,
+ **kwargs
+ ):
+ super(HdInsight, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'HDInsight' # type: str
+ self.properties = properties
+
+
+class HdInsightProperties(msrest.serialization.Model):
+ """HDInsight compute properties.
+
+ :param ssh_port: Port open for ssh connections on the master node of the cluster.
+ :type ssh_port: int
+ :param address: Public IP address of the master node of the cluster.
+ :type address: str
+ :param administrator_account: Admin credentials for master node of the cluster.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _attribute_map = {
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ *,
+ ssh_port: Optional[int] = None,
+ address: Optional[str] = None,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ **kwargs
+ ):
+ super(HdInsightProperties, self).__init__(**kwargs)
+ self.ssh_port = ssh_port
+ self.address = address
+ self.administrator_account = administrator_account
+
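+
+# Illustrative sketch only (not part of the generated models): attaching an
+# existing HDInsight cluster. The ARM resource id, address and SSH port are
+# placeholders, and the helper name is hypothetical.
+def _example_hdinsight_compute():
+    return HdInsight(
+        resource_id="<hdinsight-cluster-arm-id>",
+        properties=HdInsightProperties(ssh_port=22, address="<master-node-ip>"),
+    )
+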
+
+class Identity(msrest.serialization.Model):
+ """Identity for the resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of resource identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of resource.
+ :vartype tenant_id: str
+ :param type: The identity type. Possible values include: "SystemAssigned",
+ "SystemAssigned,UserAssigned", "UserAssigned", "None".
+ :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
+ :param user_assigned_identities: The user assigned identities associated with the resource.
+ :type user_assigned_identities: dict[str,
+ ~azure_machine_learning_workspaces.models.UserAssignedIdentity]
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ type: Optional[Union[str, "ResourceIdentityType"]] = None,
+ user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None,
+ **kwargs
+ ):
+ super(Identity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.type = type
+ self.user_assigned_identities = user_assigned_identities
+
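+
+# Illustrative sketch only (not part of the generated models): a workspace
+# identity combining a system-assigned identity with one user-assigned identity.
+# The ARM id is a placeholder; UserAssignedIdentity is defined elsewhere in this
+# module, and the helper name is hypothetical.
+def _example_identity():
+    uai_id = (
+        "/subscriptions/<sub-id>/resourceGroups/<rg>/providers/"
+        "Microsoft.ManagedIdentity/userAssignedIdentities/<uai-name>"
+    )
+    return Identity(
+        type="SystemAssigned,UserAssigned",
+        user_assigned_identities={uai_id: UserAssignedIdentity()},
+    )
+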
+
+class IdentityForCmk(msrest.serialization.Model):
+ """Identity that will be used to access key vault for encryption at rest.
+
+ :param user_assigned_identity: The ArmId of the user assigned identity that will be used to
+ access the customer managed key vault.
+ :type user_assigned_identity: str
+ """
+
+ _attribute_map = {
+ 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ user_assigned_identity: Optional[str] = None,
+ **kwargs
+ ):
+ super(IdentityForCmk, self).__init__(**kwargs)
+ self.user_assigned_identity = user_assigned_identity
+
+
+class InstanceTypeSchema(msrest.serialization.Model):
+ """Instance type schema.
+
+ :param node_selector: Node Selector.
+ :type node_selector: dict[str, str]
+ :param resources: Resource requests/limits for this instance type.
+ :type resources: ~azure_machine_learning_workspaces.models.InstanceTypeSchemaResources
+ """
+
+ _attribute_map = {
+ 'node_selector': {'key': 'nodeSelector', 'type': '{str}'},
+ 'resources': {'key': 'resources', 'type': 'InstanceTypeSchemaResources'},
+ }
+
+ def __init__(
+ self,
+ *,
+ node_selector: Optional[Dict[str, str]] = None,
+ resources: Optional["InstanceTypeSchemaResources"] = None,
+ **kwargs
+ ):
+ super(InstanceTypeSchema, self).__init__(**kwargs)
+ self.node_selector = node_selector
+ self.resources = resources
+
+
+class InstanceTypeSchemaResources(msrest.serialization.Model):
+ """Resource requests/limits for this instance type.
+
+ :param requests: Resource requests for this instance type.
+ :type requests: dict[str, str]
+ :param limits: Resource limits for this instance type.
+ :type limits: dict[str, str]
+ """
+
+ _attribute_map = {
+ 'requests': {'key': 'requests', 'type': '{str}'},
+ 'limits': {'key': 'limits', 'type': '{str}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ requests: Optional[Dict[str, str]] = None,
+ limits: Optional[Dict[str, str]] = None,
+ **kwargs
+ ):
+ super(InstanceTypeSchemaResources, self).__init__(**kwargs)
+ self.requests = requests
+ self.limits = limits
+
+
+class KeyVaultProperties(msrest.serialization.Model):
+ """KeyVaultProperties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned
+ encryption key is present.
+ :type key_vault_arm_id: str
+ :param key_identifier: Required. Key vault uri to access the encryption key.
+ :type key_identifier: str
+ :param identity_client_id: For future use - The client id of the identity which will be used to
+ access key vault.
+ :type identity_client_id: str
+ """
+
+ _validation = {
+ 'key_vault_arm_id': {'required': True},
+ 'key_identifier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
+ 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
+ 'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ key_vault_arm_id: str,
+ key_identifier: str,
+ identity_client_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(KeyVaultProperties, self).__init__(**kwargs)
+ self.key_vault_arm_id = key_vault_arm_id
+ self.key_identifier = key_identifier
+ self.identity_client_id = identity_client_id
+
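+
+# Illustrative sketch only (not part of the generated models): wiring together
+# EncryptionProperty, IdentityForCmk and KeyVaultProperties for a workspace that
+# uses a customer-managed key. All resource ids and the key identifier are
+# placeholders, and the helper name is hypothetical.
+def _example_encryption_property():
+    return EncryptionProperty(
+        status="Enabled",
+        identity=IdentityForCmk(user_assigned_identity="<uai-arm-id>"),
+        key_vault_properties=KeyVaultProperties(
+            key_vault_arm_id="<key-vault-arm-id>",
+            key_identifier="https://<vault-name>.vault.azure.net/keys/<key>/<version>",
+        ),
+    )
+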
+
+class KubernetesSchema(msrest.serialization.Model):
+ """Kubernetes Compute Schema.
+
+ :param properties: Properties of Kubernetes.
+ :type properties: ~azure_machine_learning_workspaces.models.KubernetesProperties
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'KubernetesProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["KubernetesProperties"] = None,
+ **kwargs
+ ):
+ super(KubernetesSchema, self).__init__(**kwargs)
+ self.properties = properties
+
+
+class Kubernetes(Compute, KubernetesSchema):
+ """A Machine Learning compute based on Kubernetes Compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param properties: Properties of Kubernetes.
+ :type properties: ~azure_machine_learning_workspaces.models.KubernetesProperties
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+    :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+     brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'KubernetesProperties'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["KubernetesProperties"] = None,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ **kwargs
+ ):
+ super(Kubernetes, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs)
+ self.properties = properties
+        self.compute_type = 'Kubernetes'  # type: str
+ self.compute_location = compute_location
+ self.provisioning_state = None
+ self.description = description
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = resource_id
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+ self.disable_local_auth = disable_local_auth
+
+
+class KubernetesProperties(msrest.serialization.Model):
+ """Kubernetes properties.
+
+ :param relay_connection_string: Relay connection string.
+ :type relay_connection_string: str
+ :param service_bus_connection_string: ServiceBus connection string.
+ :type service_bus_connection_string: str
+ :param extension_principal_id: Extension principal-id.
+ :type extension_principal_id: str
+ :param extension_instance_release_train: Extension instance release train.
+ :type extension_instance_release_train: str
+ :param vc_name: VC name.
+ :type vc_name: str
+ :param namespace: Compute namespace.
+ :type namespace: str
+ :param default_instance_type: Default instance type.
+ :type default_instance_type: str
+ :param instance_types: Instance Type Schema.
+ :type instance_types: dict[str, ~azure_machine_learning_workspaces.models.InstanceTypeSchema]
+ """
+
+ _attribute_map = {
+ 'relay_connection_string': {'key': 'relayConnectionString', 'type': 'str'},
+ 'service_bus_connection_string': {'key': 'serviceBusConnectionString', 'type': 'str'},
+ 'extension_principal_id': {'key': 'extensionPrincipalId', 'type': 'str'},
+ 'extension_instance_release_train': {'key': 'extensionInstanceReleaseTrain', 'type': 'str'},
+ 'vc_name': {'key': 'vcName', 'type': 'str'},
+ 'namespace': {'key': 'namespace', 'type': 'str'},
+ 'default_instance_type': {'key': 'defaultInstanceType', 'type': 'str'},
+ 'instance_types': {'key': 'instanceTypes', 'type': '{InstanceTypeSchema}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ relay_connection_string: Optional[str] = None,
+ service_bus_connection_string: Optional[str] = None,
+ extension_principal_id: Optional[str] = None,
+ extension_instance_release_train: Optional[str] = None,
+ vc_name: Optional[str] = None,
+ namespace: Optional[str] = "default",
+ default_instance_type: Optional[str] = None,
+ instance_types: Optional[Dict[str, "InstanceTypeSchema"]] = None,
+ **kwargs
+ ):
+ super(KubernetesProperties, self).__init__(**kwargs)
+ self.relay_connection_string = relay_connection_string
+ self.service_bus_connection_string = service_bus_connection_string
+ self.extension_principal_id = extension_principal_id
+ self.extension_instance_release_train = extension_instance_release_train
+ self.vc_name = vc_name
+ self.namespace = namespace
+ self.default_instance_type = default_instance_type
+ self.instance_types = instance_types
+
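+
+# Illustrative sketch only (not part of the generated models): Kubernetes compute
+# properties with a single custom instance type. The instance type name and the
+# CPU/memory figures are arbitrary examples; the helper name is hypothetical.
+def _example_kubernetes_properties():
+    return KubernetesProperties(
+        namespace="azureml",
+        default_instance_type="cpu-small",
+        instance_types={
+            "cpu-small": InstanceTypeSchema(
+                resources=InstanceTypeSchemaResources(
+                    requests={"cpu": "1", "memory": "2Gi"},
+                    limits={"cpu": "2", "memory": "4Gi"},
+                )
+            )
+        },
+    )
+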
+
+class ListAmlUserFeatureResult(msrest.serialization.Model):
+ """The List Aml user feature operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML user facing features.
+ :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature]
+ :ivar next_link: The URI to fetch the next page of AML user features information. Call
+ ListNext() with this to fetch the next page of AML user features information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[AmlUserFeature]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListAmlUserFeatureResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListNotebookKeysResult(msrest.serialization.Model):
+ """ListNotebookKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar primary_access_key:
+ :vartype primary_access_key: str
+ :ivar secondary_access_key:
+ :vartype secondary_access_key: str
+ """
+
+ _validation = {
+ 'primary_access_key': {'readonly': True},
+ 'secondary_access_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+ 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListNotebookKeysResult, self).__init__(**kwargs)
+ self.primary_access_key = None
+ self.secondary_access_key = None
+
+
+class ListStorageAccountKeysResult(msrest.serialization.Model):
+ """ListStorageAccountKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListStorageAccountKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+
+
+class ListUsagesResult(msrest.serialization.Model):
+ """The List Usages operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML resource usages.
+ :vartype value: list[~azure_machine_learning_workspaces.models.Usage]
+ :ivar next_link: The URI to fetch the next page of AML resource usage information. Call
+ ListNext() with this to fetch the next page of AML resource usage information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Usage]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListUsagesResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListWorkspaceKeysResult(msrest.serialization.Model):
+ """ListWorkspaceKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ :ivar user_storage_resource_id:
+ :vartype user_storage_resource_id: str
+ :ivar app_insights_instrumentation_key:
+ :vartype app_insights_instrumentation_key: str
+ :ivar container_registry_credentials:
+ :vartype container_registry_credentials:
+ ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult
+ :ivar notebook_access_keys:
+ :vartype notebook_access_keys: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ 'user_storage_resource_id': {'readonly': True},
+ 'app_insights_instrumentation_key': {'readonly': True},
+ 'container_registry_credentials': {'readonly': True},
+ 'notebook_access_keys': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'},
+ 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'},
+ 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'},
+ 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+ self.user_storage_resource_id = None
+ self.app_insights_instrumentation_key = None
+ self.container_registry_credentials = None
+ self.notebook_access_keys = None
+
+
+class ListWorkspaceQuotas(msrest.serialization.Model):
+ """The List WorkspaceQuotasByVMFamily operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of Workspace Quotas by VM Family.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota]
+ :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family.
+ Call ListNext() with this to fetch the next page of Workspace Quota information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ResourceQuota]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceQuotas, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class NodeStateCounts(msrest.serialization.Model):
+ """Counts of various compute node states on the amlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar idle_node_count: Number of compute nodes in idle state.
+ :vartype idle_node_count: int
+ :ivar running_node_count: Number of compute nodes which are running jobs.
+ :vartype running_node_count: int
+ :ivar preparing_node_count: Number of compute nodes which are being prepared.
+ :vartype preparing_node_count: int
+ :ivar unusable_node_count: Number of compute nodes which are in unusable state.
+ :vartype unusable_node_count: int
+ :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute.
+ :vartype leaving_node_count: int
+ :ivar preempted_node_count: Number of compute nodes which are in preempted state.
+ :vartype preempted_node_count: int
+ """
+
+ _validation = {
+ 'idle_node_count': {'readonly': True},
+ 'running_node_count': {'readonly': True},
+ 'preparing_node_count': {'readonly': True},
+ 'unusable_node_count': {'readonly': True},
+ 'leaving_node_count': {'readonly': True},
+ 'preempted_node_count': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
+ 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
+ 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
+ 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
+ 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
+ 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NodeStateCounts, self).__init__(**kwargs)
+ self.idle_node_count = None
+ self.running_node_count = None
+ self.preparing_node_count = None
+ self.unusable_node_count = None
+ self.leaving_node_count = None
+ self.preempted_node_count = None
+
+
+class NotebookAccessTokenResult(msrest.serialization.Model):
+ """NotebookAccessTokenResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar notebook_resource_id:
+ :vartype notebook_resource_id: str
+ :ivar host_name:
+ :vartype host_name: str
+ :ivar public_dns:
+ :vartype public_dns: str
+ :ivar access_token:
+ :vartype access_token: str
+ :ivar token_type:
+ :vartype token_type: str
+ :ivar expires_in:
+ :vartype expires_in: int
+ :ivar refresh_token:
+ :vartype refresh_token: str
+ :ivar scope:
+ :vartype scope: str
+ """
+
+ _validation = {
+ 'notebook_resource_id': {'readonly': True},
+ 'host_name': {'readonly': True},
+ 'public_dns': {'readonly': True},
+ 'access_token': {'readonly': True},
+ 'token_type': {'readonly': True},
+ 'expires_in': {'readonly': True},
+ 'refresh_token': {'readonly': True},
+ 'scope': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'},
+ 'host_name': {'key': 'hostName', 'type': 'str'},
+ 'public_dns': {'key': 'publicDns', 'type': 'str'},
+ 'access_token': {'key': 'accessToken', 'type': 'str'},
+ 'token_type': {'key': 'tokenType', 'type': 'str'},
+ 'expires_in': {'key': 'expiresIn', 'type': 'int'},
+ 'refresh_token': {'key': 'refreshToken', 'type': 'str'},
+ 'scope': {'key': 'scope', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookAccessTokenResult, self).__init__(**kwargs)
+ self.notebook_resource_id = None
+ self.host_name = None
+ self.public_dns = None
+ self.access_token = None
+ self.token_type = None
+ self.expires_in = None
+ self.refresh_token = None
+ self.scope = None
+
+
+class NotebookPreparationError(msrest.serialization.Model):
+ """NotebookPreparationError.
+
+ :param error_message:
+ :type error_message: str
+ :param status_code:
+ :type status_code: int
+ """
+
+ _attribute_map = {
+ 'error_message': {'key': 'errorMessage', 'type': 'str'},
+ 'status_code': {'key': 'statusCode', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ error_message: Optional[str] = None,
+ status_code: Optional[int] = None,
+ **kwargs
+ ):
+ super(NotebookPreparationError, self).__init__(**kwargs)
+ self.error_message = error_message
+ self.status_code = status_code
+
+
+class NotebookResourceInfo(msrest.serialization.Model):
+ """NotebookResourceInfo.
+
+ :param fqdn:
+ :type fqdn: str
+    :param resource_id: The data plane resourceId that is used to initialize the notebook component.
+ :type resource_id: str
+ :param notebook_preparation_error: The error that occurs when preparing notebook.
+ :type notebook_preparation_error:
+ ~azure_machine_learning_workspaces.models.NotebookPreparationError
+ """
+
+ _attribute_map = {
+ 'fqdn': {'key': 'fqdn', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'},
+ }
+
+ def __init__(
+ self,
+ *,
+ fqdn: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ notebook_preparation_error: Optional["NotebookPreparationError"] = None,
+ **kwargs
+ ):
+ super(NotebookResourceInfo, self).__init__(**kwargs)
+ self.fqdn = fqdn
+ self.resource_id = resource_id
+ self.notebook_preparation_error = notebook_preparation_error
+
+
+class Operation(msrest.serialization.Model):
+ """Azure Machine Learning workspace REST API operation.
+
+ :param name: Operation name: {provider}/{resource}/{operation}.
+ :type name: str
+ :param display: Display name of operation.
+ :type display: ~azure_machine_learning_workspaces.models.OperationDisplay
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'display': {'key': 'display', 'type': 'OperationDisplay'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ display: Optional["OperationDisplay"] = None,
+ **kwargs
+ ):
+ super(Operation, self).__init__(**kwargs)
+ self.name = name
+ self.display = display
+
+
+class OperationDisplay(msrest.serialization.Model):
+ """Display name of operation.
+
+ :param provider: The resource provider name: Microsoft.MachineLearningExperimentation.
+ :type provider: str
+ :param resource: The resource on which the operation is performed.
+ :type resource: str
+ :param operation: The operation that users can perform.
+ :type operation: str
+ :param description: The description for the operation.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'provider': {'key': 'provider', 'type': 'str'},
+ 'resource': {'key': 'resource', 'type': 'str'},
+ 'operation': {'key': 'operation', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ provider: Optional[str] = None,
+ resource: Optional[str] = None,
+ operation: Optional[str] = None,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(OperationDisplay, self).__init__(**kwargs)
+ self.provider = provider
+ self.resource = resource
+ self.operation = operation
+ self.description = description
+
+
+class OperationListResult(msrest.serialization.Model):
+ """An array of operations supported by the resource provider.
+
+ :param value: List of AML workspace operations supported by the AML workspace resource
+ provider.
+ :type value: list[~azure_machine_learning_workspaces.models.Operation]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Operation]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["Operation"]] = None,
+ **kwargs
+ ):
+ super(OperationListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class PaginatedComputeResourcesList(msrest.serialization.Model):
+ """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope.
+
+ :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
+ :type value: list[~azure_machine_learning_workspaces.models.ComputeResource]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ComputeResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["ComputeResource"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(PaginatedComputeResourcesList, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
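+
+# Illustrative sketch only (not part of the generated models): draining a paged
+# compute list. `fetch_page` stands in for whatever call returns a
+# PaginatedComputeResourcesList for a given continuation link (None for the first
+# page); it is a hypothetical callable, not part of this module.
+def _example_iterate_compute_resources(fetch_page):
+    next_link = None
+    while True:
+        page = fetch_page(next_link)  # -> PaginatedComputeResourcesList
+        for compute_resource in page.value or []:
+            yield compute_resource
+        next_link = page.next_link
+        if not next_link:
+            break
+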
+
+class PaginatedWorkspaceConnectionsList(msrest.serialization.Model):
+ """Paginated list of Workspace connection objects.
+
+ :param value: An array of Workspace connection objects.
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceConnection]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["WorkspaceConnection"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class Password(msrest.serialization.Model):
+ """Password.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name:
+ :vartype name: str
+ :ivar value:
+ :vartype value: str
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Password, self).__init__(**kwargs)
+ self.name = None
+ self.value = None
+
+
+class PersonalComputeInstanceSettings(msrest.serialization.Model):
+ """Settings for a personal compute instance.
+
+ :param assigned_user: A user explicitly assigned to a personal compute instance.
+ :type assigned_user: ~azure_machine_learning_workspaces.models.AssignedUser
+ """
+
+ _attribute_map = {
+ 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'},
+ }
+
+ def __init__(
+ self,
+ *,
+ assigned_user: Optional["AssignedUser"] = None,
+ **kwargs
+ ):
+ super(PersonalComputeInstanceSettings, self).__init__(**kwargs)
+ self.assigned_user = assigned_user
+
+
+class PrivateEndpoint(msrest.serialization.Model):
+ """The Private Endpoint resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The ARM identifier for Private Endpoint.
+ :vartype id: str
+ :ivar subnet_arm_id: The ARM identifier for Subnet resource that private endpoint links to.
+ :vartype subnet_arm_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'subnet_arm_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpoint, self).__init__(**kwargs)
+ self.id = None
+ self.subnet_arm_id = None
+
+
+class PrivateEndpointConnection(Resource):
+ """The Private Endpoint Connection resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: System data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param private_endpoint: The resource of the private endpoint.
+ :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint
+ :param private_link_service_connection_state: A collection of information about the state of
+ the connection between service consumer and provider.
+ :type private_link_service_connection_state:
+ ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState
+ :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
+ Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
+ 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ private_endpoint: Optional["PrivateEndpoint"] = None,
+ private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None,
+ **kwargs
+ ):
+ super(PrivateEndpointConnection, self).__init__(**kwargs)
+ self.identity = identity
+ self.location = location
+ self.tags = tags
+ self.sku = sku
+ self.system_data = None
+ self.private_endpoint = private_endpoint
+ self.private_link_service_connection_state = private_link_service_connection_state
+ self.provisioning_state = None
+
+
+class PrivateEndpointConnectionListResult(msrest.serialization.Model):
+ """List of private endpoint connection associated with the specified workspace.
+
+ :param value: Array of private endpoint connections.
+ :type value: list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["PrivateEndpointConnection"]] = None,
+ **kwargs
+ ):
+ super(PrivateEndpointConnectionListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class PrivateLinkResource(Resource):
+ """A private link resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: System data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar group_id: The private link resource group id.
+ :vartype group_id: str
+ :ivar required_members: The private link resource required member names.
+ :vartype required_members: list[str]
+ :param required_zone_names: The private link resource Private link DNS zone name.
+ :type required_zone_names: list[str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'group_id': {'readonly': True},
+ 'required_members': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
+ 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ required_zone_names: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(PrivateLinkResource, self).__init__(**kwargs)
+ self.identity = identity
+ self.location = location
+ self.tags = tags
+ self.sku = sku
+ self.system_data = None
+ self.group_id = None
+ self.required_members = None
+ self.required_zone_names = required_zone_names
+
+
+class PrivateLinkResourceListResult(msrest.serialization.Model):
+ """A list of private link resources.
+
+ :param value: Array of private link resources.
+ :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["PrivateLinkResource"]] = None,
+ **kwargs
+ ):
+ super(PrivateLinkResourceListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class PrivateLinkServiceConnectionState(msrest.serialization.Model):
+ """A collection of information about the state of the connection between service consumer and provider.
+
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ :param description: The reason for approval/rejection of the connection.
+ :type description: str
+ :param actions_required: A message indicating if changes on the service provider require any
+ updates on the consumer.
+ :type actions_required: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+ description: Optional[str] = None,
+ actions_required: Optional[str] = None,
+ **kwargs
+ ):
+ super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+ self.status = status
+ self.description = description
+ self.actions_required = actions_required
+
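+# Illustrative sketch: building the connection-state payload a workspace admin
+# would use to approve a pending private endpoint connection. Only the model
+# construction is shown; sending it through an update operation is assumed to
+# happen elsewhere.
+#
+#     approved_state = PrivateLinkServiceConnectionState(
+#         status="Approved",
+#         description="Approved by the workspace administrator",
+#     )
+#     connection = PrivateEndpointConnection(
+#         private_link_service_connection_state=approved_state,
+#     )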
+
+class QuotaBaseProperties(msrest.serialization.Model):
+ """The properties for Quota update or retrieval.
+
+ :param id: Specifies the resource ID.
+ :type id: str
+ :param type: Specifies the resource type.
+ :type type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :param unit: An enum describing the unit of quota measurement. Possible values include:
+ "Count".
+ :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: Optional[str] = None,
+ type: Optional[str] = None,
+ limit: Optional[int] = None,
+ unit: Optional[Union[str, "QuotaUnit"]] = None,
+ **kwargs
+ ):
+ super(QuotaBaseProperties, self).__init__(**kwargs)
+ self.id = id
+ self.type = type
+ self.limit = limit
+ self.unit = unit
+
+
+class QuotaUpdateParameters(msrest.serialization.Model):
+ """Quota update parameters.
+
+ :param value: The list for update quota.
+ :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties]
+ :param location: Region of workspace quota to be updated.
+ :type location: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'},
+ 'location': {'key': 'location', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["QuotaBaseProperties"]] = None,
+ location: Optional[str] = None,
+ **kwargs
+ ):
+ super(QuotaUpdateParameters, self).__init__(**kwargs)
+ self.value = value
+ self.location = location
+
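+# Illustrative sketch: composing a QuotaUpdateParameters body from
+# QuotaBaseProperties entries. The resource id and limit below are made-up
+# placeholder values, not real quota ids.
+#
+#     quota_update = QuotaUpdateParameters(
+#         location="eastus",
+#         value=[
+#             QuotaBaseProperties(
+#                 id="<vm-family-quota-resource-id>",
+#                 type="Microsoft.MachineLearningServices/workspaces/quotas",
+#                 limit=48,
+#                 unit="Count",
+#             )
+#         ],
+#     )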
+
+class RegistryListCredentialsResult(msrest.serialization.Model):
+ """RegistryListCredentialsResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location:
+ :vartype location: str
+ :ivar username:
+ :vartype username: str
+ :param passwords:
+ :type passwords: list[~azure_machine_learning_workspaces.models.Password]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'username': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'passwords': {'key': 'passwords', 'type': '[Password]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ passwords: Optional[List["Password"]] = None,
+ **kwargs
+ ):
+ super(RegistryListCredentialsResult, self).__init__(**kwargs)
+ self.location = None
+ self.username = None
+ self.passwords = passwords
+
+
+class ResourceId(msrest.serialization.Model):
+ """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. The ID of the resource.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str,
+ **kwargs
+ ):
+ super(ResourceId, self).__init__(**kwargs)
+ self.id = id
+
+
+class ResourceName(msrest.serialization.Model):
+ """The Resource Name.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class ResourceQuota(msrest.serialization.Model):
+ """The quota assigned to a resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar aml_workspace_location: Region of the AML workspace in the id.
+ :vartype aml_workspace_location: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar name: Name of the resource.
+ :vartype name: ~azure_machine_learning_workspaces.models.ResourceName
+ :ivar limit: The maximum permitted quota of the resource.
+ :vartype limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'aml_workspace_location': {'readonly': True},
+ 'type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'ResourceName'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceQuota, self).__init__(**kwargs)
+ self.id = None
+ self.aml_workspace_location = None
+ self.type = None
+ self.name = None
+ self.limit = None
+ self.unit = None
+
+
+class ResourceSkuLocationInfo(msrest.serialization.Model):
+ """ResourceSkuLocationInfo.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location: Location of the SKU.
+ :vartype location: str
+ :ivar zones: List of availability zones where the SKU is supported.
+ :vartype zones: list[str]
+ :ivar zone_details: Details of capabilities available to a SKU in specific zones.
+ :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'zones': {'readonly': True},
+ 'zone_details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'zones': {'key': 'zones', 'type': '[str]'},
+ 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuLocationInfo, self).__init__(**kwargs)
+ self.location = None
+ self.zones = None
+ self.zone_details = None
+
+
+class ResourceSkuZoneDetails(msrest.serialization.Model):
+ """Describes The zonal capabilities of a SKU.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The set of zones that the SKU is available in with the specified capabilities.
+ :vartype name: list[str]
+ :ivar capabilities: A list of capabilities that are available for the SKU in the specified list
+ of zones.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': '[str]'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuZoneDetails, self).__init__(**kwargs)
+ self.name = None
+ self.capabilities = None
+
+
+class Restriction(msrest.serialization.Model):
+ """The restriction because of which SKU cannot be used.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar type: The type of restrictions. As of now, the only possible value for this is location.
+ :vartype type: str
+    :ivar values: The value of restrictions. If the restriction type is set to location, this would
+     be the different locations where the SKU is restricted.
+ :vartype values: list[str]
+ :param reason_code: The reason for the restriction. Possible values include: "NotSpecified",
+ "NotAvailableForRegion", "NotAvailableForSubscription".
+ :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'values': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[str]'},
+ 'reason_code': {'key': 'reasonCode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ reason_code: Optional[Union[str, "ReasonCode"]] = None,
+ **kwargs
+ ):
+ super(Restriction, self).__init__(**kwargs)
+ self.type = None
+ self.values = None
+ self.reason_code = reason_code
+
+
+class ScaleSettings(msrest.serialization.Model):
+ """scale settings for AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_node_count: Required. Max number of nodes to use.
+ :type max_node_count: int
+ :param min_node_count: Min number of nodes to use.
+ :type min_node_count: int
+    :param node_idle_time_before_scale_down: Node idle time before scaling down amlCompute. This
+     string needs to be in ISO 8601 duration format (for example, "PT5M").
+ :type node_idle_time_before_scale_down: ~datetime.timedelta
+ """
+
+ _validation = {
+ 'max_node_count': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+ }
+
+ def __init__(
+ self,
+ *,
+ max_node_count: int,
+ min_node_count: Optional[int] = 0,
+ node_idle_time_before_scale_down: Optional[datetime.timedelta] = None,
+ **kwargs
+ ):
+ super(ScaleSettings, self).__init__(**kwargs)
+ self.max_node_count = max_node_count
+ self.min_node_count = min_node_count
+ self.node_idle_time_before_scale_down = node_idle_time_before_scale_down
+
+
+class ScaleSettingsInformation(msrest.serialization.Model):
+ """Desired scale settings for the amlCompute.
+
+ :param scale_settings: scale settings for AML Compute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ """
+
+ _attribute_map = {
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+ }
+
+ def __init__(
+ self,
+ *,
+ scale_settings: Optional["ScaleSettings"] = None,
+ **kwargs
+ ):
+ super(ScaleSettingsInformation, self).__init__(**kwargs)
+ self.scale_settings = scale_settings
+
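+# Illustrative sketch: a ScaleSettings block that allows scale-to-zero and
+# releases idle nodes after two minutes, wrapped in ScaleSettingsInformation
+# (defined just below). `datetime` is imported at the top of this module; the
+# serializer emits the timedelta as an ISO 8601 duration such as "PT2M".
+#
+#     scale = ScaleSettings(
+#         max_node_count=4,
+#         min_node_count=0,
+#         node_idle_time_before_scale_down=datetime.timedelta(minutes=2),
+#     )
+#     desired = ScaleSettingsInformation(scale_settings=scale)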
+
+class ScriptReference(msrest.serialization.Model):
+ """Script reference.
+
+ :param script_source: The storage source of the script: inline, workspace.
+ :type script_source: str
+ :param script_data: The location of scripts in the mounted volume.
+ :type script_data: str
+ :param script_arguments: Optional command line arguments passed to the script to run.
+ :type script_arguments: str
+ :param timeout: Optional time period passed to timeout command.
+ :type timeout: str
+ """
+
+ _attribute_map = {
+ 'script_source': {'key': 'scriptSource', 'type': 'str'},
+ 'script_data': {'key': 'scriptData', 'type': 'str'},
+ 'script_arguments': {'key': 'scriptArguments', 'type': 'str'},
+ 'timeout': {'key': 'timeout', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ script_source: Optional[str] = None,
+ script_data: Optional[str] = None,
+ script_arguments: Optional[str] = None,
+ timeout: Optional[str] = None,
+ **kwargs
+ ):
+ super(ScriptReference, self).__init__(**kwargs)
+ self.script_source = script_source
+ self.script_data = script_data
+ self.script_arguments = script_arguments
+ self.timeout = timeout
+
+
+class ScriptsToExecute(msrest.serialization.Model):
+ """Customized setup scripts.
+
+ :param startup_script: Script that's run every time the machine starts.
+ :type startup_script: ~azure_machine_learning_workspaces.models.ScriptReference
+    :param creation_script: Script that's run only once during provisioning of the compute.
+ :type creation_script: ~azure_machine_learning_workspaces.models.ScriptReference
+ """
+
+ _attribute_map = {
+ 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'},
+ 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ startup_script: Optional["ScriptReference"] = None,
+ creation_script: Optional["ScriptReference"] = None,
+ **kwargs
+ ):
+ super(ScriptsToExecute, self).__init__(**kwargs)
+ self.startup_script = startup_script
+ self.creation_script = creation_script
+
+
+class ServiceManagedResourcesSettings(msrest.serialization.Model):
+ """ServiceManagedResourcesSettings.
+
+ :param cosmos_db: The settings for the service managed cosmosdb account.
+ :type cosmos_db: ~azure_machine_learning_workspaces.models.CosmosDbSettings
+ """
+
+ _attribute_map = {
+ 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'},
+ }
+
+ def __init__(
+ self,
+ *,
+ cosmos_db: Optional["CosmosDbSettings"] = None,
+ **kwargs
+ ):
+ super(ServiceManagedResourcesSettings, self).__init__(**kwargs)
+ self.cosmos_db = cosmos_db
+
+
+class ServicePrincipalCredentials(msrest.serialization.Model):
+ """Service principal credentials.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param client_id: Required. Client Id.
+ :type client_id: str
+ :param client_secret: Required. Client secret.
+ :type client_secret: str
+ """
+
+ _validation = {
+ 'client_id': {'required': True},
+ 'client_secret': {'required': True},
+ }
+
+ _attribute_map = {
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ client_id: str,
+ client_secret: str,
+ **kwargs
+ ):
+ super(ServicePrincipalCredentials, self).__init__(**kwargs)
+ self.client_id = client_id
+ self.client_secret = client_secret
+
+
+class SetupScripts(msrest.serialization.Model):
+ """Details of customized scripts to execute for setting up the cluster.
+
+ :param scripts: Customized setup scripts.
+ :type scripts: ~azure_machine_learning_workspaces.models.ScriptsToExecute
+ """
+
+ _attribute_map = {
+ 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'},
+ }
+
+ def __init__(
+ self,
+ *,
+ scripts: Optional["ScriptsToExecute"] = None,
+ **kwargs
+ ):
+ super(SetupScripts, self).__init__(**kwargs)
+ self.scripts = scripts
+
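+# Illustrative sketch: nesting ScriptReference -> ScriptsToExecute ->
+# SetupScripts to describe a one-time provisioning script. The script path,
+# arguments, and timeout are placeholder values.
+#
+#     creation = ScriptReference(
+#         script_source="workspace",
+#         script_data="Users/admin/setup.sh",
+#         script_arguments="--install-extras",
+#         timeout="5m",
+#     )
+#     setup = SetupScripts(scripts=ScriptsToExecute(creation_script=creation))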
+
+class SharedPrivateLinkResource(msrest.serialization.Model):
+ """SharedPrivateLinkResource.
+
+ :param name: Unique name of the private link.
+ :type name: str
+    :param private_link_resource_id: The resource id that the private link links to.
+ :type private_link_resource_id: str
+ :param group_id: The private link resource group id.
+ :type group_id: str
+ :param request_message: Request message.
+ :type request_message: str
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+ 'status': {'key': 'properties.status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ private_link_resource_id: Optional[str] = None,
+ group_id: Optional[str] = None,
+ request_message: Optional[str] = None,
+ status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+ **kwargs
+ ):
+ super(SharedPrivateLinkResource, self).__init__(**kwargs)
+ self.name = name
+ self.private_link_resource_id = private_link_resource_id
+ self.group_id = group_id
+ self.request_message = request_message
+ self.status = status
+
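+# Illustrative sketch: a shared private link request targeting the "Sql" group
+# of a Cosmos DB account. The resource id is a placeholder, not a real ARM id.
+#
+#     shared_link = SharedPrivateLinkResource(
+#         name="cosmos-link",
+#         private_link_resource_id="<cosmos-db-account-resource-id>",
+#         group_id="Sql",
+#         request_message="Please approve",
+#         status="Pending",
+#     )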
+
+class Sku(msrest.serialization.Model):
+ """Sku of the resource.
+
+ :param name: Name of the sku.
+ :type name: str
+ :param tier: Tier of the sku like Basic or Enterprise.
+ :type tier: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ tier: Optional[str] = None,
+ **kwargs
+ ):
+ super(Sku, self).__init__(**kwargs)
+ self.name = name
+ self.tier = tier
+
+
+class SkuCapability(msrest.serialization.Model):
+ """Features/user capabilities associated with the sku.
+
+ :param name: Capability/Feature ID.
+ :type name: str
+ :param value: Details about the feature/capability.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ value: Optional[str] = None,
+ **kwargs
+ ):
+ super(SkuCapability, self).__init__(**kwargs)
+ self.name = name
+ self.value = value
+
+
+class SkuListResult(msrest.serialization.Model):
+ """List of skus with features.
+
+ :param value:
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku]
+ :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this
+ URI to fetch the next page of Workspace Skus.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceSku]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["WorkspaceSku"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(SkuListResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class SslConfiguration(msrest.serialization.Model):
+ """The ssl configuration for scoring.
+
+ :param status: Enable or disable ssl for scoring. Possible values include: "Disabled",
+ "Enabled", "Auto".
+ :type status: str or ~azure_machine_learning_workspaces.models.SslConfigurationStatus
+ :param cert: Cert data.
+ :type cert: str
+ :param key: Key data.
+ :type key: str
+ :param cname: CNAME of the cert.
+ :type cname: str
+ :param leaf_domain_label: Leaf domain label of public endpoint.
+ :type leaf_domain_label: str
+ :param overwrite_existing_domain: Indicates whether to overwrite existing domain label.
+ :type overwrite_existing_domain: bool
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'cert': {'key': 'cert', 'type': 'str'},
+ 'key': {'key': 'key', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'},
+ 'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Optional[Union[str, "SslConfigurationStatus"]] = None,
+ cert: Optional[str] = None,
+ key: Optional[str] = None,
+ cname: Optional[str] = None,
+ leaf_domain_label: Optional[str] = None,
+ overwrite_existing_domain: Optional[bool] = None,
+ **kwargs
+ ):
+ super(SslConfiguration, self).__init__(**kwargs)
+ self.status = status
+ self.cert = cert
+ self.key = key
+ self.cname = cname
+ self.leaf_domain_label = leaf_domain_label
+ self.overwrite_existing_domain = overwrite_existing_domain
+
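+# Illustrative sketch: enabling SSL on a scoring endpoint using a leaf domain
+# label; the cert, key, and cname fields would only be set when bringing your
+# own certificate. The label value is a placeholder.
+#
+#     ssl = SslConfiguration(
+#         status="Enabled",
+#         leaf_domain_label="myendpoint",
+#         overwrite_existing_domain=True,
+#     )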
+
+class SynapseSpark(Compute):
+ """A SynapseSpark compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+    :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+     brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+    :param disable_local_auth: Opt out of local authentication and ensure customers can use only
+     MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.SynapseSparkProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'SynapseSparkProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["SynapseSparkProperties"] = None,
+ **kwargs
+ ):
+ super(SynapseSpark, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'SynapseSpark' # type: str
+ self.properties = properties
+
+
+class SynapseSparkProperties(msrest.serialization.Model):
+ """SynapseSparkProperties.
+
+ :param auto_scale_properties: Auto scale properties.
+ :type auto_scale_properties: ~azure_machine_learning_workspaces.models.AutoScaleProperties
+ :param auto_pause_properties: Auto pause properties.
+ :type auto_pause_properties: ~azure_machine_learning_workspaces.models.AutoPauseProperties
+ :param spark_version: Spark version.
+ :type spark_version: str
+ :param node_count: The number of compute nodes currently assigned to the compute.
+ :type node_count: int
+ :param node_size: Node size.
+ :type node_size: str
+ :param node_size_family: Node size family.
+ :type node_size_family: str
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ :param resource_group: Name of the resource group in which workspace is located.
+ :type resource_group: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param pool_name: Pool name.
+ :type pool_name: str
+ """
+
+ _attribute_map = {
+ 'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'},
+ 'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'},
+ 'spark_version': {'key': 'sparkVersion', 'type': 'str'},
+ 'node_count': {'key': 'nodeCount', 'type': 'int'},
+ 'node_size': {'key': 'nodeSize', 'type': 'str'},
+ 'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'},
+ 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+ 'resource_group': {'key': 'resourceGroup', 'type': 'str'},
+ 'workspace_name': {'key': 'workspaceName', 'type': 'str'},
+ 'pool_name': {'key': 'poolName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ auto_scale_properties: Optional["AutoScaleProperties"] = None,
+ auto_pause_properties: Optional["AutoPauseProperties"] = None,
+ spark_version: Optional[str] = None,
+ node_count: Optional[int] = None,
+ node_size: Optional[str] = None,
+ node_size_family: Optional[str] = None,
+ subscription_id: Optional[str] = None,
+ resource_group: Optional[str] = None,
+ workspace_name: Optional[str] = None,
+ pool_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(SynapseSparkProperties, self).__init__(**kwargs)
+ self.auto_scale_properties = auto_scale_properties
+ self.auto_pause_properties = auto_pause_properties
+ self.spark_version = spark_version
+ self.node_count = node_count
+ self.node_size = node_size
+ self.node_size_family = node_size_family
+ self.subscription_id = subscription_id
+ self.resource_group = resource_group
+ self.workspace_name = workspace_name
+ self.pool_name = pool_name
+
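+# Illustrative sketch: describing an attached Synapse Spark pool. The resource
+# id and sizing values are placeholders.
+#
+#     synapse = SynapseSpark(
+#         description="Attached Synapse Spark pool",
+#         resource_id="<synapse-big-data-pool-resource-id>",
+#         properties=SynapseSparkProperties(
+#             node_count=3,
+#             node_size="Medium",
+#             node_size_family="MemoryOptimized",
+#             spark_version="3.1",
+#         ),
+#     )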
+
+class SystemData(msrest.serialization.Model):
+ """Metadata pertaining to creation and last modification of the resource.
+
+ :param created_by: The identity that created the resource.
+ :type created_by: str
+ :param created_by_type: The type of identity that created the resource. Possible values
+ include: "User", "Application", "ManagedIdentity", "Key".
+ :type created_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
+ :param created_at: The timestamp of resource creation (UTC).
+ :type created_at: ~datetime.datetime
+ :param last_modified_by: The identity that last modified the resource.
+ :type last_modified_by: str
+ :param last_modified_by_type: The type of identity that last modified the resource. Possible
+ values include: "User", "Application", "ManagedIdentity", "Key".
+ :type last_modified_by_type: str or ~azure_machine_learning_workspaces.models.CreatedByType
+ :param last_modified_at: The timestamp of resource last modification (UTC).
+ :type last_modified_at: ~datetime.datetime
+ """
+
+ _attribute_map = {
+ 'created_by': {'key': 'createdBy', 'type': 'str'},
+ 'created_by_type': {'key': 'createdByType', 'type': 'str'},
+ 'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
+ 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
+ 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
+ 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ *,
+ created_by: Optional[str] = None,
+ created_by_type: Optional[Union[str, "CreatedByType"]] = None,
+ created_at: Optional[datetime.datetime] = None,
+ last_modified_by: Optional[str] = None,
+ last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None,
+ last_modified_at: Optional[datetime.datetime] = None,
+ **kwargs
+ ):
+ super(SystemData, self).__init__(**kwargs)
+ self.created_by = created_by
+ self.created_by_type = created_by_type
+ self.created_at = created_at
+ self.last_modified_by = last_modified_by
+ self.last_modified_by_type = last_modified_by_type
+ self.last_modified_at = last_modified_at
+
+
+class SystemService(msrest.serialization.Model):
+ """A system service running on a compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar system_service_type: The type of this system service.
+ :vartype system_service_type: str
+ :ivar public_ip_address: Public IP address.
+ :vartype public_ip_address: str
+ :ivar version: The version for this type.
+ :vartype version: str
+ """
+
+ _validation = {
+ 'system_service_type': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'version': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemService, self).__init__(**kwargs)
+ self.system_service_type = None
+ self.public_ip_address = None
+ self.version = None
+
+
+class UpdateWorkspaceQuotas(msrest.serialization.Model):
+ """The properties for update Quota response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ :param status: Status of update workspace quota. Possible values include: "Undefined",
+ "Success", "Failure", "InvalidQuotaBelowClusterMinimum",
+ "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku",
+ "OperationNotEnabledForRegion".
+ :type status: str or ~azure_machine_learning_workspaces.models.Status
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ limit: Optional[int] = None,
+ status: Optional[Union[str, "Status"]] = None,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.limit = limit
+ self.unit = None
+ self.status = status
+
+
+class UpdateWorkspaceQuotasResult(msrest.serialization.Model):
+ """The result of update workspace quota.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of workspace quota update result.
+ :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas]
+ :ivar next_link: The URI to fetch the next page of workspace quota update result. Call
+ ListNext() with this to fetch the next page of Workspace Quota update result.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class Usage(msrest.serialization.Model):
+ """Describes AML Resource Usage.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar aml_workspace_location: Region of the AML workspace in the id.
+ :vartype aml_workspace_location: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit
+ :ivar current_value: The current usage of the resource.
+ :vartype current_value: long
+ :ivar limit: The maximum permitted usage of the resource.
+ :vartype limit: long
+ :ivar name: The name of the type of usage.
+ :vartype name: ~azure_machine_learning_workspaces.models.UsageName
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'aml_workspace_location': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ 'current_value': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'name': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'current_value': {'key': 'currentValue', 'type': 'long'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'UsageName'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Usage, self).__init__(**kwargs)
+ self.id = None
+ self.aml_workspace_location = None
+ self.type = None
+ self.unit = None
+ self.current_value = None
+ self.limit = None
+ self.name = None
+
+
+class UsageName(msrest.serialization.Model):
+ """The Usage Names.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UsageName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+ """Settings for user account that gets created on each on the nodes of a compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param admin_user_name: Required. Name of the administrator user account which can be used to
+ SSH to nodes.
+ :type admin_user_name: str
+ :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+ :type admin_user_ssh_public_key: str
+ :param admin_user_password: Password of the administrator user account.
+ :type admin_user_password: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+ 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ admin_user_name: str,
+ admin_user_ssh_public_key: Optional[str] = None,
+ admin_user_password: Optional[str] = None,
+ **kwargs
+ ):
+ super(UserAccountCredentials, self).__init__(**kwargs)
+ self.admin_user_name = admin_user_name
+ self.admin_user_ssh_public_key = admin_user_ssh_public_key
+ self.admin_user_password = admin_user_password
+
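+# Illustrative sketch: the admin account created on every AmlCompute node.
+# Supplying an SSH public key rather than a password is the usual choice; the
+# values below are placeholders.
+#
+#     admin = UserAccountCredentials(
+#         admin_user_name="azureuser",
+#         admin_user_ssh_public_key="<ssh-rsa ...>",
+#     )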
+
+class UserAssignedIdentity(msrest.serialization.Model):
+ """User Assigned Identity.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of the user assigned identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of the user assigned identity.
+ :vartype tenant_id: str
+ :ivar client_id: The clientId(aka appId) of the user assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'client_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAssignedIdentity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.client_id = None
+
+
+class VirtualMachine(Compute):
+ """A Machine Learning compute based on Azure Virtual Machines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: list[~azure_machine_learning_workspaces.models.ErrorResponse]
+    :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+     brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+    :param disable_local_auth: Opt out of local authentication and ensure customers can use only
+     MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.VirtualMachineProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["VirtualMachineProperties"] = None,
+ **kwargs
+ ):
+ super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.properties = properties
+
+
+class VirtualMachineImage(msrest.serialization.Model):
+ """Virtual Machine image for Windows AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. Virtual Machine image path.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str,
+ **kwargs
+ ):
+ super(VirtualMachineImage, self).__init__(**kwargs)
+ self.id = id
+
+
+class VirtualMachineProperties(msrest.serialization.Model):
+ """VirtualMachineProperties.
+
+ :param virtual_machine_size: Virtual Machine size.
+ :type virtual_machine_size: str
+ :param ssh_port: Port open for ssh connections.
+ :type ssh_port: int
+ :param address: Public IP address of the virtual machine.
+ :type address: str
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ :param is_notebook_instance_compute: Indicates whether this compute will be used for running
+ notebooks.
+ :type is_notebook_instance_compute: bool
+ """
+
+ _attribute_map = {
+ 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ virtual_machine_size: Optional[str] = None,
+ ssh_port: Optional[int] = None,
+ address: Optional[str] = None,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ is_notebook_instance_compute: Optional[bool] = None,
+ **kwargs
+ ):
+ super(VirtualMachineProperties, self).__init__(**kwargs)
+ self.virtual_machine_size = virtual_machine_size
+ self.ssh_port = ssh_port
+ self.address = address
+ self.administrator_account = administrator_account
+ self.is_notebook_instance_compute = is_notebook_instance_compute
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine",
+ "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ *,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSecrets, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.administrator_account = administrator_account
+
+
+class VirtualMachineSize(msrest.serialization.Model):
+ """Describes the properties of a VM size.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The name of the virtual machine size.
+ :vartype name: str
+ :ivar family: The family name of the virtual machine size.
+ :vartype family: str
+ :ivar v_cp_us: The number of vCPUs supported by the virtual machine size.
+ :vartype v_cp_us: int
+    :ivar gpus: The number of GPUs supported by the virtual machine size.
+ :vartype gpus: int
+ :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+ :vartype os_vhd_size_mb: int
+ :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+ size.
+ :vartype max_resource_volume_mb: int
+ :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+ :vartype memory_gb: float
+ :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+ :vartype low_priority_capable: bool
+ :ivar premium_io: Specifies if the virtual machine size supports premium IO.
+ :vartype premium_io: bool
+ :param estimated_vm_prices: The estimated price information for using a VM.
+ :type estimated_vm_prices: ~azure_machine_learning_workspaces.models.EstimatedVmPrices
+ :param supported_compute_types: Specifies the compute types supported by the virtual machine
+ size.
+ :type supported_compute_types: list[str]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'family': {'readonly': True},
+ 'v_cp_us': {'readonly': True},
+ 'gpus': {'readonly': True},
+ 'os_vhd_size_mb': {'readonly': True},
+ 'max_resource_volume_mb': {'readonly': True},
+ 'memory_gb': {'readonly': True},
+ 'low_priority_capable': {'readonly': True},
+ 'premium_io': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'family': {'key': 'family', 'type': 'str'},
+ 'v_cp_us': {'key': 'vCPUs', 'type': 'int'},
+ 'gpus': {'key': 'gpus', 'type': 'int'},
+ 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'},
+ 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'},
+ 'memory_gb': {'key': 'memoryGB', 'type': 'float'},
+ 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'},
+ 'premium_io': {'key': 'premiumIO', 'type': 'bool'},
+ 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVmPrices'},
+ 'supported_compute_types': {'key': 'supportedComputeTypes', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ estimated_vm_prices: Optional["EstimatedVmPrices"] = None,
+ supported_compute_types: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSize, self).__init__(**kwargs)
+ self.name = None
+ self.family = None
+ self.v_cp_us = None
+ self.gpus = None
+ self.os_vhd_size_mb = None
+ self.max_resource_volume_mb = None
+ self.memory_gb = None
+ self.low_priority_capable = None
+ self.premium_io = None
+ self.estimated_vm_prices = estimated_vm_prices
+ self.supported_compute_types = supported_compute_types
+
+
+class VirtualMachineSizeListResult(msrest.serialization.Model):
+ """The List Virtual Machine size operation response.
+
+ :param value: The list of virtual machine sizes supported by AmlCompute.
+ :type value: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[VirtualMachineSize]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["VirtualMachineSize"]] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+ """Admin credentials for virtual machine.
+
+ :param username: Username of admin account.
+ :type username: str
+ :param password: Password of admin account.
+ :type password: str
+ :param public_key_data: Public key data.
+ :type public_key_data: str
+ :param private_key_data: Private key data.
+ :type private_key_data: str
+ """
+
+ _attribute_map = {
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+ 'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ public_key_data: Optional[str] = None,
+ private_key_data: Optional[str] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+ self.username = username
+ self.password = password
+ self.public_key_data = public_key_data
+ self.private_key_data = private_key_data
+
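+# Illustrative sketch: describing an existing VM to attach as a compute target,
+# with SSH admin credentials. The address, username, and key are placeholders;
+# key-based credentials are generally preferred over passwords.
+#
+#     vm = VirtualMachine(
+#         properties=VirtualMachineProperties(
+#             address="10.0.0.4",
+#             ssh_port=22,
+#             administrator_account=VirtualMachineSshCredentials(
+#                 username="azureuser",
+#                 public_key_data="<ssh-rsa ...>",
+#             ),
+#         ),
+#     )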
+
+class Workspace(Resource):
+ """An object that represents a machine learning workspace.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Fully qualified resource ID for the resource. Ex -
+ /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+ :vartype id: str
+ :ivar name: The name of the resource.
+ :vartype name: str
+ :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or
+ "Microsoft.Storage/storageAccounts".
+ :vartype type: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: System data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar workspace_id: The immutable id associated with this workspace.
+ :vartype workspace_id: str
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace. This name is mutable.
+ :type friendly_name: str
+ :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+ :param discovery_url: URL for the discovery service to identify regional endpoints for machine
+ learning experimentation services.
+ :type discovery_url: str
+ :ivar provisioning_state: The current deployment state of the workspace resource. The
+ provisioningState indicates the state of resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param encryption: The encryption settings of Azure ML workspace.
+ :type encryption: ~azure_machine_learning_workspaces.models.EncryptionProperty
+ :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+ collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by
+ the workspace RP in the customer subscription if the workspace is a CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+ access when behind VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :param public_network_access: Whether requests from Public Network are allowed. Possible values
+ include: "Enabled", "Disabled".
+ :type public_network_access: str or
+ ~azure_machine_learning_workspaces.models.PublicNetworkAccess
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ :ivar notebook_info: The notebook info of Azure ML workspace.
+ :vartype notebook_info: ~azure_machine_learning_workspaces.models.NotebookResourceInfo
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ :ivar tenant_id: The tenant id associated with this workspace.
+ :vartype tenant_id: str
+ :ivar storage_hns_enabled: Whether the storage associated with the workspace has hierarchical
+ namespace (HNS) enabled.
+ :vartype storage_hns_enabled: bool
+ :ivar ml_flow_tracking_uri: The URI associated with this workspace that the MLflow client must
+ point at to set up tracking.
+ :vartype ml_flow_tracking_uri: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'workspace_id': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'service_provisioned_resource_group': {'readonly': True},
+ 'private_link_count': {'readonly': True},
+ 'private_endpoint_connections': {'readonly': True},
+ 'notebook_info': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'storage_hns_enabled': {'readonly': True},
+ 'ml_flow_tracking_uri': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'key_vault': {'key': 'properties.keyVault', 'type': 'str'},
+ 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'},
+ 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'},
+ 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'},
+ 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
+ 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'},
+ 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'},
+ 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'},
+ 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'},
+ 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
+ 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'},
+ 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'},
+ 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+ 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+ 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'},
+ 'storage_hns_enabled': {'key': 'properties.storageHnsEnabled', 'type': 'bool'},
+ 'ml_flow_tracking_uri': {'key': 'properties.mlFlowTrackingUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ description: Optional[str] = None,
+ friendly_name: Optional[str] = None,
+ key_vault: Optional[str] = None,
+ application_insights: Optional[str] = None,
+ container_registry: Optional[str] = None,
+ storage_account: Optional[str] = None,
+ discovery_url: Optional[str] = None,
+ encryption: Optional["EncryptionProperty"] = None,
+ hbi_workspace: Optional[bool] = False,
+ image_build_compute: Optional[str] = None,
+ allow_public_access_when_behind_vnet: Optional[bool] = False,
+ public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None,
+ shared_private_link_resources: Optional[List["SharedPrivateLinkResource"]] = None,
+ service_managed_resources_settings: Optional["ServiceManagedResourcesSettings"] = None,
+ primary_user_assigned_identity: Optional[str] = None,
+ **kwargs
+ ):
+ super(Workspace, self).__init__(**kwargs)
+ self.identity = identity
+ self.location = location
+ self.tags = tags
+ self.sku = sku
+ self.system_data = None
+ self.workspace_id = None
+ self.description = description
+ self.friendly_name = friendly_name
+ self.key_vault = key_vault
+ self.application_insights = application_insights
+ self.container_registry = container_registry
+ self.storage_account = storage_account
+ self.discovery_url = discovery_url
+ self.provisioning_state = None
+ self.encryption = encryption
+ self.hbi_workspace = hbi_workspace
+ self.service_provisioned_resource_group = None
+ self.private_link_count = None
+ self.image_build_compute = image_build_compute
+ self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet
+ self.public_network_access = public_network_access
+ self.private_endpoint_connections = None
+ self.shared_private_link_resources = shared_private_link_resources
+ self.notebook_info = None
+ self.service_managed_resources_settings = service_managed_resources_settings
+ self.primary_user_assigned_identity = primary_user_assigned_identity
+ self.tenant_id = None
+ self.storage_hns_enabled = None
+ self.ml_flow_tracking_uri = None
+
+
+class WorkspaceConnection(msrest.serialization.Model):
+ """Workspace connection.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: ResourceId of the workspace connection.
+ :vartype id: str
+ :ivar name: Friendly name of the workspace connection.
+ :vartype name: str
+ :ivar type: Resource type of workspace connection.
+ :vartype type: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+ :param value_format: Format of the workspace connection value. Possible values include:
+ "JSON".
+ :type value_format: str or ~azure_machine_learning_workspaces.models.ValueFormat
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ 'value_format': {'key': 'properties.valueFormat', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ category: Optional[str] = None,
+ target: Optional[str] = None,
+ auth_type: Optional[str] = None,
+ value: Optional[str] = None,
+ value_format: Optional[Union[str, "ValueFormat"]] = None,
+ **kwargs
+ ):
+ super(WorkspaceConnection, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.category = category
+ self.target = target
+ self.auth_type = auth_type
+ self.value = value
+ self.value_format = value_format
+
+
+class WorkspaceListResult(msrest.serialization.Model):
+ """The result of a request to list machine learning workspaces.
+
+ :param value: The list of machine learning workspaces. Since this list may be incomplete, the
+ nextLink field should be used to request the next list of machine learning workspaces.
+ :type value: list[~azure_machine_learning_workspaces.models.Workspace]
+ :param next_link: The URI that can be used to request the next list of machine learning
+ workspaces.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Workspace]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["Workspace"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(WorkspaceListResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class WorkspaceSku(msrest.serialization.Model):
+ """Describes Workspace Sku details and features.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar locations: The set of locations in which the SKU is available, i.e. the supported and
+ registered Azure geo regions (e.g. West US, East US, Southeast Asia).
+ :vartype locations: list[str]
+ :ivar location_info: A list of locations and availability zones in those locations where the
+ SKU is available.
+ :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo]
+ :ivar tier: Sku Tier like Basic or Enterprise.
+ :vartype tier: str
+ :ivar resource_type:
+ :vartype resource_type: str
+ :ivar name:
+ :vartype name: str
+ :ivar capabilities: List of features/user capabilities associated with the sku.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ :param restrictions: The restrictions preventing this SKU from being used. This is empty if
+ there are no restrictions.
+ :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction]
+ """
+
+ _validation = {
+ 'locations': {'readonly': True},
+ 'location_info': {'readonly': True},
+ 'tier': {'readonly': True},
+ 'resource_type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'locations': {'key': 'locations', 'type': '[str]'},
+ 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ restrictions: Optional[List["Restriction"]] = None,
+ **kwargs
+ ):
+ super(WorkspaceSku, self).__init__(**kwargs)
+ self.locations = None
+ self.location_info = None
+ self.tier = None
+ self.resource_type = None
+ self.name = None
+ self.capabilities = None
+ self.restrictions = restrictions
+
+
+class WorkspaceUpdateParameters(msrest.serialization.Model):
+ """The parameters for updating a machine learning workspace.
+
+ :param tags: A set of tags. The resource tags for the machine learning workspace.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace.
+ :type friendly_name: str
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ :param public_network_access: Whether requests from Public Network are allowed. Possible values
+ include: "Enabled", "Disabled".
+ :type public_network_access: str or
+ ~azure_machine_learning_workspaces.models.PublicNetworkAccess
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+ 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+ 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ identity: Optional["Identity"] = None,
+ description: Optional[str] = None,
+ friendly_name: Optional[str] = None,
+ image_build_compute: Optional[str] = None,
+ service_managed_resources_settings: Optional["ServiceManagedResourcesSettings"] = None,
+ primary_user_assigned_identity: Optional[str] = None,
+ public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None,
+ **kwargs
+ ):
+ super(WorkspaceUpdateParameters, self).__init__(**kwargs)
+ self.tags = tags
+ self.sku = sku
+ self.identity = identity
+ self.description = description
+ self.friendly_name = friendly_name
+ self.image_build_compute = image_build_compute
+ self.service_managed_resources_settings = service_managed_resources_settings
+ self.primary_user_assigned_identity = primary_user_assigned_identity
+ self.public_network_access = public_network_access
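The model classes above are plain msrest models. As a minimal usage sketch (not part of the generated diff, and assuming the vendored SDK import path shown in the file headers), read/write attributes are passed as keyword arguments while read-only attributes are left for the server to populate:

```
# Sketch only: the import path assumes the vendored SDK layout used by this extension.
from azext_machinelearningservices.vendored_sdks.machinelearningservices import models

# Read/write attributes are keyword arguments; read-only attributes such as
# workspace_id or provisioning_state are populated only from server responses.
update_params = models.WorkspaceUpdateParameters(
    tags={"env": "dev"},                 # hypothetical tag values
    description="new description",
    friendly_name="New friendly name",
    public_network_access="Disabled",
)

# msrest's Model.serialize() maps snake_case attributes onto the flattened REST keys
# declared in _attribute_map, e.g. description -> properties.description.
print(update_params.serialize())
```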
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py
new file mode 100644
index 00000000000..44c7bf6aeeb
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py
@@ -0,0 +1,33 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import Operations
+from ._workspaces_operations import WorkspacesOperations
+from ._usages_operations import UsagesOperations
+from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations
+from ._quotas_operations import QuotasOperations
+from ._compute_operations import ComputeOperations
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
+from ._private_link_resources_operations import PrivateLinkResourcesOperations
+from ._workspace_connections_operations import WorkspaceConnectionsOperations
+from ._workspace_features_operations import WorkspaceFeaturesOperations
+from ._workspace_skus_operations import WorkspaceSkusOperations
+
+__all__ = [
+ 'Operations',
+ 'WorkspacesOperations',
+ 'UsagesOperations',
+ 'VirtualMachineSizesOperations',
+ 'QuotasOperations',
+ 'ComputeOperations',
+ 'PrivateEndpointConnectionsOperations',
+ 'PrivateLinkResourcesOperations',
+ 'WorkspaceConnectionsOperations',
+ 'WorkspaceFeaturesOperations',
+ 'WorkspaceSkusOperations',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_compute_operations.py
new file mode 100644
index 00000000000..1c349c852a0
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_compute_operations.py
@@ -0,0 +1,1102 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class ComputeOperations(object):
+ """ComputeOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ skip=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.PaginatedComputeResourcesList"]
+ """Gets computes in specified workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedComputeResourcesList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore
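As a brief illustration of the paging pattern used by `list` (a sketch, not generated code): the method returns an `ItemPaged` that follows `nextLink` internally, so callers simply iterate. The `ml_client` variable below is an assumption; only the operation group, parameters, and return type come from this file.

```
# Sketch: `ml_client` is assumed to be an already-constructed service client from this
# vendored SDK that exposes this operation group as `ml_client.compute`.
for compute in ml_client.compute.list(
    resource_group_name="workspace-1234",
    workspace_name="testworkspace",
):
    # Each item is a ComputeResource; nextLink / $skip handling happens inside ItemPaged.
    print(compute.name)
```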
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeResource"
+ """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are
+ not returned - use 'keys' nested resource to get them.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ComputeResource"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeResource"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ComputeResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 200:
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def begin_create_or_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ComputeResource"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.ComputeResource"]
+ """Creates or updates compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify
+ that it does not exist yet.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Payload with Machine Learning compute definition.
+ :type parameters: ~azure_machine_learning_workspaces.models.ComputeResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ response_headers = {}
+ response = pipeline_response.http_response
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
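A hedged sketch of the long-running-operation pattern implemented above: `begin_create_or_update` returns an `LROPoller`, and `.result()` blocks until the Azure-AsyncOperation polling finishes. `ml_client` and `compute_payload` are assumptions for illustration; the operation name and parameters come from this file.

```
# Sketch: `ml_client` is an assumed service client; `compute_payload` is an
# already-built models.ComputeResource (see the models file earlier in this diff).
poller = ml_client.compute.begin_create_or_update(
    resource_group_name="workspace-1234",
    workspace_name="testworkspace",
    compute_name="testcompute",          # hypothetical compute name
    parameters=compute_payload,
)
compute = poller.result()                # blocks until the LRO completes
print(compute.name)
```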
+
+ def _update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ClusterUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeResource"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def begin_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ClusterUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.ComputeResource"]
+ """Updates properties of a compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Additional parameters for cluster update.
+ :type parameters: ~azure_machine_learning_workspaces.models.ClusterUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def _delete_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ underlying_resource_action, # type: Union[str, "models.UnderlyingResourceAction"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def begin_delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ underlying_resource_action, # type: Union[str, "models.UnderlyingResourceAction"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Deletes specified Machine Learning compute.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the
+ underlying compute from the workspace if 'Detach'.
+ :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ underlying_resource_action=underlying_resource_action,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
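To make the `underlying_resource_action` semantics concrete, a short sketch under the same `ml_client` assumption: 'Detach' removes the compute from the workspace but keeps the underlying Azure resource, while 'Delete' removes both.

```
# Sketch: detach the compute from the workspace without deleting the underlying resource.
poller = ml_client.compute.begin_delete(
    resource_group_name="workspace-1234",
    workspace_name="testworkspace",
    compute_name="testcompute",               # hypothetical compute name
    underlying_resource_action="Detach",      # pass "Delete" to remove the resource as well
)
poller.wait()                                 # LROPoller[None]: no body is returned
```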
+
+ def list_nodes(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.AmlComputeNodesInformation"]
+ """Get the details (e.g IP address, port etc) of all the compute nodes in the compute.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either AmlComputeNodesInformation or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.AmlComputeNodesInformation]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.AmlComputeNodesInformation"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_nodes.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response)
+ list_of_elem = deserialized.nodes
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore
+
+ def list_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeSecrets"
+ """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc).
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeSecrets, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeSecrets"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeSecrets', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore
+
+ def _start_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._start_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ def begin_start(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Posts a start action to a compute instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._start_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ def _stop_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._stop_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ def begin_stop(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Posts a stop action to a compute instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._stop_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ def _restart_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._restart_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
+
+ def begin_restart(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Posts a restart action to a compute instance.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._restart_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
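For orientation, here is a minimal sketch of how these generated start/stop/restart long-running operations might be driven from Python. The client class name `AzureMachineLearningWorkspaces`, the `compute` attribute name, and the credential type are assumptions for this vendored copy, not confirmed by the diff.

```
# Hypothetical usage sketch for the generated begin_start/begin_stop/begin_restart LROs.
# Names flagged as assumptions in the lead-in are illustrative only.
from azure.identity import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,  # assumed client class name
)

client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# begin_start returns an LROPoller[None]; the initial POST must answer 202,
# after which ARMPolling tracks the operation until it completes.
poller = client.compute.begin_start("<resource-group>", "<workspace>", "<compute-name>")
poller.wait()

# begin_stop / begin_restart have the same shape; polling=False skips ARMPolling
# and resolves the poller as soon as the 202 response is accepted.
client.compute.begin_stop("<resource-group>", "<workspace>", "<compute-name>", polling=False)
```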
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py
new file mode 100644
index 00000000000..b3c84e56cba
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py
@@ -0,0 +1,110 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class Operations(object):
+ """Operations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.OperationListResult"]
+ """Lists all of the available Azure Machine Learning Workspaces REST API operations.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either OperationListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.OperationListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('OperationListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore
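A brief sketch of consuming the pager returned by `Operations.list`; the `operations` attribute name on the client is an assumption.

```
# Hypothetical iteration over the ItemPaged returned by Operations.list.
from azure.identity import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,  # assumed client class name
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# Pages are fetched lazily: get_next() issues the GET and extract_data()
# deserializes OperationListResult and yields its `value` items.
for op in client.operations.list():  # `operations` attribute name is assumed
    print(op.name)
```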
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py
new file mode 100644
index 00000000000..95fedd8f9be
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py
@@ -0,0 +1,322 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateEndpointConnectionsOperations(object):
+ """PrivateEndpointConnectionsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.PrivateEndpointConnectionListResult"]
+ """List all the private endpoint connections associated with the workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PrivateEndpointConnectionListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnectionListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('PrivateEndpointConnectionListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections'} # type: ignore
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ private_endpoint_connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.PrivateEndpointConnection"
+ """Gets the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ def create_or_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ private_endpoint_connection_name, # type: str
+ properties, # type: "models.PrivateEndpointConnection"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.PrivateEndpointConnection"
+ """Update the state of specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :param properties: The private endpoint connection properties.
+ :type properties: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create_or_update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ def delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ private_endpoint_connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Deletes the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
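A hedged sketch of approving a private endpoint connection through the operations above. The `private_endpoint_connections` attribute name and the model names `PrivateEndpointConnection` / `PrivateLinkServiceConnectionState` mirror the public azure-mgmt SDK and are assumptions for this vendored copy.

```
from azure.identity import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,  # assumed client class name
    models,                          # assumed to re-export the generated models
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")
pec_ops = client.private_endpoint_connections  # assumed attribute name

# List existing connections (paged), then approve one by PUTting its new state.
for conn in pec_ops.list("<resource-group>", "<workspace>"):
    print(conn.name, conn.private_link_service_connection_state.status)

approved = pec_ops.create_or_update(
    "<resource-group>",
    "<workspace>",
    "<connection-name>",
    properties=models.PrivateEndpointConnection(
        private_link_service_connection_state=models.PrivateLinkServiceConnectionState(
            status="Approved",
            description="Approved by admin",
        )
    ),
)
```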
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py
new file mode 100644
index 00000000000..30319eb2919
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py
@@ -0,0 +1,104 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateLinkResourcesOperations(object):
+ """PrivateLinkResourcesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.PrivateLinkResourceListResult"
+ """Gets the private link resources that need to be created for a workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateLinkResourceListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore
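The `cls` keyword documented above lets a caller receive the raw pipeline response alongside the deserialized model. A small sketch, with the `private_link_resources` attribute name assumed and result field names following the public SDK.

```
from azure.identity import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,  # assumed client class name
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

def keep_raw(pipeline_response, deserialized, _headers):
    # cls is invoked as cls(pipeline_response, deserialized, response_headers);
    # returning a tuple exposes the HTTP status alongside the model.
    return pipeline_response.http_response.status_code, deserialized

status, result = client.private_link_resources.list(  # attribute name assumed
    "<resource-group>", "<workspace>", cls=keep_raw
)
for res in result.value:
    print(res.group_id, res.required_members)
```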
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py
new file mode 100644
index 00000000000..8ce981b5f0e
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py
@@ -0,0 +1,182 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class QuotasOperations(object):
+ """QuotasOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def update(
+ self,
+ location, # type: str
+ parameters, # type: "models.QuotaUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.UpdateWorkspaceQuotasResult"
+ """Update quota for each VM family in workspace.
+
+ :param location: The location for which the quota update is queried.
+ :type location: str
+ :param parameters: Quota update parameters.
+ :type parameters: ~azure_machine_learning_workspaces.models.QuotaUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: UpdateWorkspaceQuotasResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.UpdateWorkspaceQuotasResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'QuotaUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListWorkspaceQuotas"]
+ """Gets the currently assigned Workspace Quotas based on VMFamily.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceQuotas"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas'} # type: ignore
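A sketch of requesting a quota update with the operation above. The `quotas` attribute and the model names `QuotaUpdateParameters` / `QuotaBaseProperties` follow the public SDK and are assumptions for this vendored copy.

```
from azure.identity import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,  # assumed client class name
    models,                          # assumed models re-export
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# Request a new dedicated-core limit for one VM family in one workspace.
update = models.QuotaUpdateParameters(
    value=[
        models.QuotaBaseProperties(
            id="<workspace-resource-id>/quotas/<vm-family-quota-name>",
            type="Microsoft.MachineLearningServices/workspaces/quotas",
            limit=48,
            unit="Count",
        )
    ]
)
result = client.quotas.update("<location>", update)  # `quotas` attribute name assumed
for item in result.value:
    print(item.id, item.status)
```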
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py
new file mode 100644
index 00000000000..7cffe1e8568
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py
@@ -0,0 +1,118 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations(object):
+ """UsagesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListUsagesResult"]
+ """Gets the current usage information as well as limits for AML resources for given subscription
+ and location.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListUsagesResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListUsagesResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py
new file mode 100644
index 00000000000..78d7960aba7
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py
@@ -0,0 +1,100 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class VirtualMachineSizesOperations(object):
+ """VirtualMachineSizesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.VirtualMachineSizeListResult"
+ """Returns supported VM Sizes in a location.
+
+ :param location: The location for which virtual machine sizes are queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: VirtualMachineSizeListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineSizeListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore
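Unlike the paged operations above, this call returns a single `VirtualMachineSizeListResult`. A short sketch, with the `virtual_machine_sizes` attribute name assumed.

```
from azure.identity import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,  # assumed client class name
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# Single GET, no paging: the whole list is returned in one response body.
sizes = client.virtual_machine_sizes.list("eastus")  # attribute name assumed
for size in sizes.value:
    print(size.name)
```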
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py
new file mode 100644
index 00000000000..48095db55aa
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py
@@ -0,0 +1,329 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceConnectionsOperations(object):
+ """WorkspaceConnectionsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ target=None, # type: Optional[str]
+ category=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.PaginatedWorkspaceConnectionsList"]
+ """List all connections under a AML workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedWorkspaceConnectionsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if target is not None:
+ query_parameters['target'] = self._serialize.query("target", target, 'str')
+ if category is not None:
+ query_parameters['category'] = self._serialize.query("category", category, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore
+
+ def create(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ parameters, # type: "models.WorkspaceConnection"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.WorkspaceConnection"
+ """Add a new workspace connection.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :param parameters: The object for creating or updating a new workspace connection.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceConnection')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.WorkspaceConnection"
+ """Get the detail of a workspace connection.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ def delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Delete a workspace connection.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
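+
+ # Usage sketch (illustrative comment only): the four operations above form a simple CRUD
+ # flow. `client` is assumed to be a generated AzureMachineLearningWorkspaces instance (see
+ # the sketch in _virtual_machine_sizes_operations.py), and `connection_payload` is assumed
+ # to be a models.WorkspaceConnection built by the caller according to the service schema.
+ #
+ #     created = client.workspace_connections.create(
+ #         resource_group_name="workspace-1234",
+ #         workspace_name="testworkspace",
+ #         connection_name="myconnection",
+ #         parameters=connection_payload,
+ #     )
+ #     fetched = client.workspace_connections.get("workspace-1234", "testworkspace", "myconnection")
+ #     for conn in client.workspace_connections.list("workspace-1234", "testworkspace"):
+ #         print(conn.name)
+ #     client.workspace_connections.delete("workspace-1234", "testworkspace", "myconnection")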
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py
new file mode 100644
index 00000000000..056e1ad677c
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py
@@ -0,0 +1,122 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceFeaturesOperations(object):
+ """WorkspaceFeaturesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListAmlUserFeatureResult"]
+ """Lists all enabled features for a workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListAmlUserFeatureResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore
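+
+ # Usage sketch (illustrative comment only): list() returns an ItemPaged iterator, so the
+ # next_link handling in extract_data/get_next above is transparent to callers. `client` is
+ # assumed to be a generated AzureMachineLearningWorkspaces instance; the attributes printed
+ # are taken from the AmlUserFeature model referenced above.
+ #
+ #     for feature in client.workspace_features.list("workspace-1234", "testworkspace"):
+ #         print(feature.id)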
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_skus_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_skus_operations.py
new file mode 100644
index 00000000000..6a2576cf518
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_skus_operations.py
@@ -0,0 +1,114 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceSkusOperations(object):
+ """WorkspaceSkusOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.SkuListResult"]
+ """Lists all skus with associated features.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either SkuListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.SkuListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SkuListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('SkuListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore
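+
+ # Usage sketch (illustrative comment only): as with the other paged operations, callers just
+ # iterate the ItemPaged result; the subscription-level URL above is resolved internally.
+ # `client` is assumed to be a generated AzureMachineLearningWorkspaces instance.
+ #
+ #     for sku in client.workspace_skus.list():
+ #         print(sku)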
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py
new file mode 100644
index 00000000000..afd32b26301
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py
@@ -0,0 +1,1240 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspacesOperations(object):
+ """WorkspacesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.Workspace"
+ """Gets the properties of the specified machine learning workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
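+
+ # Usage sketch (illustrative comment only): get() returns the deserialized Workspace by
+ # default; passing the `cls` keyword (see ClsType at the top of this file) lets callers also
+ # observe the raw pipeline response. `client` is assumed to be a generated
+ # AzureMachineLearningWorkspaces instance.
+ #
+ #     ws = client.workspaces.get("workspace-1234", "testworkspace")
+ #
+ #     ws, status = client.workspaces.get(
+ #         "workspace-1234",
+ #         "testworkspace",
+ #         cls=lambda pipeline_response, deserialized, headers: (deserialized, pipeline_response.http_response.status_code),
+ #     )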
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.Workspace"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["models.Workspace"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'Workspace')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def begin_create_or_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.Workspace"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.Workspace"]
+ """Creates or updates a workspace with the specified parameters.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for creating or updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.Workspace
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either Workspace or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.Workspace]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
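+
+ # Usage sketch (illustrative comment only): begin_create_or_update() returns an LROPoller
+ # and result() blocks until the ARM long-running operation completes. The continuation-token
+ # round trip mirrors the `continuation_token` keyword documented above. `client` and
+ # `workspace_payload` (a models.Workspace) are assumed to exist.
+ #
+ #     poller = client.workspaces.begin_create_or_update(
+ #         "workspace-1234", "testworkspace", parameters=workspace_payload
+ #     )
+ #     token = poller.continuation_token()  # can be persisted and used to resume later
+ #     resumed = client.workspaces.begin_create_or_update(
+ #         "workspace-1234", "testworkspace", parameters=workspace_payload, continuation_token=token
+ #     )
+ #     workspace = resumed.result()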
+
+ def _delete_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def begin_delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Deletes a machine learning workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
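+
+ # Usage sketch (illustrative comment only): deletion is a long-running operation as well.
+ # wait() blocks without returning a value, which matches the LROPoller[None] return type;
+ # passing polling=False selects NoPolling and returns right after the initial 200/202/204
+ # response. `client` is assumed to be a generated client instance.
+ #
+ #     client.workspaces.begin_delete("workspace-1234", "testworkspace").wait()
+ #
+ #     fire_and_forget = client.workspaces.begin_delete(
+ #         "workspace-1234", "testworkspace", polling=False
+ #     )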
+
+ def update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.WorkspaceUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.Workspace"
+ """Updates a machine learning workspace with the specified parameters.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
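+
+ # Usage sketch (illustrative comment only): update() issues a single PATCH with a
+ # WorkspaceUpdateParameters body and returns the updated Workspace synchronously. The field
+ # names shown are assumptions based on the model referenced above.
+ #
+ #     updated = client.workspaces.update(
+ #         "workspace-1234",
+ #         "testworkspace",
+ #         parameters=models.WorkspaceUpdateParameters(
+ #             description="new description",
+ #             friendly_name="New friendly name",
+ #         ),
+ #     )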
+
+ def list_by_resource_group(
+ self,
+ resource_group_name, # type: str
+ skip=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.WorkspaceListResult"]
+ """Lists all the available machine learning workspaces under the specified resource group.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_resource_group.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
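+
+ # Usage sketch (illustrative comment only): the ItemPaged returned here can be consumed item
+ # by item or page by page; the optional `skip` argument is forwarded as the $skip query
+ # parameter on the first request only, as the code above shows. `client` is assumed to be a
+ # generated client instance.
+ #
+ #     pager = client.workspaces.list_by_resource_group("workspace-1234")
+ #     for page in pager.by_page():
+ #         for ws in page:
+ #             print(ws.name, ws.location)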
+
+ def _diagnose_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters=None, # type: Optional["models.DiagnoseWorkspaceParameters"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["models.DiagnoseResponseResult"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DiagnoseResponseResult"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._diagnose_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ if parameters is not None:
+ body_content = self._serialize.body(parameters, 'DiagnoseWorkspaceParameters')
+ else:
+ body_content = None
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response)
+
+ if response.status_code == 202:
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+ response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After'))
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _diagnose_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose'} # type: ignore
+
+ def begin_diagnose(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters=None, # type: Optional["models.DiagnoseWorkspaceParameters"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.DiagnoseResponseResult"]
+ """Diagnose workspace setup issue.
+
+ Diagnose workspace setup issue.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for diagnosing workspace health.
+ :type parameters: ~azure_machine_learning_workspaces.models.DiagnoseWorkspaceParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either DiagnoseResponseResult or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.DiagnoseResponseResult]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.DiagnoseResponseResult"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._diagnose_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_diagnose.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose'} # type: ignore
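+
+ # Usage sketch (illustrative comment only): `parameters` is optional, and the service may
+ # answer 202 with a Location header, which the location-based ARM polling above follows
+ # until a DiagnoseResponseResult is available. `client` is assumed to be a generated client
+ # instance.
+ #
+ #     poller = client.workspaces.begin_diagnose("workspace-1234", "testworkspace")
+ #     diagnose_result = poller.result()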
+
+ def list_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ListWorkspaceKeysResult"
+ """Lists all the keys associated with this workspace. This includes keys for the storage account,
+ app insights and password for container registry.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListWorkspaceKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore
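+    # Hypothetical usage sketch (not part of the generated code): assuming the
+    # vendored client class is AzureMachineLearningWorkspaces and it exposes this
+    # operations group as `workspaces`, the workspace keys could be fetched like so:
+    #
+    #     from azure.identity import DefaultAzureCredential
+    #     client = AzureMachineLearningWorkspaces(
+    #         credential=DefaultAzureCredential(),
+    #         subscription_id="00000000-1111-2222-3333-444444444444",
+    #     )
+    #     keys = client.workspaces.list_keys("workspace-1234", "testworkspace")
+    #     print(keys.as_dict())  # ListWorkspaceKeysResult (msrest model)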
+
+ def _resync_keys_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._resync_keys_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _resync_keys_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
+
+ def begin_resync_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Resync all the keys associated with this workspace. This includes keys for the storage account,
+ app insights and password for container registry.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._resync_keys_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
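+    # Hypothetical usage sketch (not part of the generated code), reusing the client
+    # assumed above: begin_resync_keys returns an azure.core.polling.LROPoller, so a
+    # caller typically blocks until the long-running operation completes:
+    #
+    #     poller = client.workspaces.begin_resync_keys("workspace-1234", "testworkspace")
+    #     poller.result()  # returns None once the resync has finished
+    #
+    # poller.continuation_token() can be saved and passed back via the
+    # `continuation_token` keyword to resume polling from a stored state.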
+
+ def list_by_subscription(
+ self,
+ skip=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.WorkspaceListResult"]
+ """Lists all the available machine learning workspaces under the specified subscription.
+
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_subscription.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.ErrorResponse, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
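+    # Hypothetical usage sketch (not part of the generated code), reusing the client
+    # assumed above: list_by_subscription returns an azure.core.paging.ItemPaged that
+    # follows the nextLink transparently, so callers simply iterate:
+    #
+    #     for workspace in client.workspaces.list_by_subscription():
+    #         print(workspace.name, workspace.location)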
+
+ def list_notebook_access_token(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.NotebookAccessTokenResult"
+ """return notebook access token and refresh token.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: NotebookAccessTokenResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.NotebookAccessTokenResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookAccessTokenResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_notebook_access_token.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('NotebookAccessTokenResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_notebook_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken'} # type: ignore
+
+ def _prepare_notebook_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["models.NotebookResourceInfo"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResourceInfo"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._prepare_notebook_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _prepare_notebook_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ def begin_prepare_notebook(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.NotebookResourceInfo"]
+ """Prepare a notebook.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either NotebookResourceInfo or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResourceInfo"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._prepare_notebook_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_prepare_notebook.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ def list_storage_account_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ListStorageAccountKeysResult"
+ """List storage account keys of a workspace.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListStorageAccountKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListStorageAccountKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListStorageAccountKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_storage_account_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListStorageAccountKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_storage_account_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys'} # type: ignore
+
+ def list_notebook_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ListNotebookKeysResult"
+ """List keys of a notebook.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListNotebookKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListNotebookKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_notebook_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_notebook_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys'} # type: ignore
+
+ def list_outbound_network_dependencies_endpoints(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ExternalFqdnResponse"
+ """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) programmatically.
+
+ Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs)
+ programmatically.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ExternalFqdnResponse, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ExternalFqdnResponse
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ExternalFqdnResponse"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-07-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_outbound_network_dependencies_endpoints.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.ErrorResponse, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ExternalFqdnResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_outbound_network_dependencies_endpoints.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed
new file mode 100644
index 00000000000..e5aff4f83af
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/src/machinelearningservices/report.md b/src/machinelearningservices/report.md
new file mode 100644
index 00000000000..7c2d72f4b7c
--- /dev/null
+++ b/src/machinelearningservices/report.md
@@ -0,0 +1,767 @@
+# Azure CLI Module Creation Report
+
+## EXTENSION
+|CLI Extension|Command Groups|
+|---------|------------|
+|az machinelearningservices|[groups](#CommandGroups)|
+
+## GROUPS
+### Command groups in `az machinelearningservices` extension
+|CLI Command Group|Group Swagger name|Commands|
+|---------|------------|--------|
+|az machinelearningservices workspace|Workspaces|[commands](#CommandsInWorkspaces)|
+|az machinelearningservices usage|Usages|[commands](#CommandsInUsages)|
+|az machinelearningservices virtual-machine-size|VirtualMachineSizes|[commands](#CommandsInVirtualMachineSizes)|
+|az machinelearningservices quota|Quotas|[commands](#CommandsInQuotas)|
+|az machinelearningservices compute|Compute|[commands](#CommandsInCompute)|
+|az machinelearningservices private-endpoint-connection|PrivateEndpointConnections|[commands](#CommandsInPrivateEndpointConnections)|
+|az machinelearningservices private-link-resource|PrivateLinkResources|[commands](#CommandsInPrivateLinkResources)|
+|az machinelearningservices workspace-connection|WorkspaceConnections|[commands](#CommandsInWorkspaceConnections)|
+|az machinelearningservices workspace-feature|WorkspaceFeatures|[commands](#CommandsInWorkspaceFeatures)|
+|az machinelearningservices workspace-sku|WorkspaceSkus|[commands](#CommandsInWorkspaceSkus)|
+
+## COMMANDS
+### Commands in `az machinelearningservices compute` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices compute list](#ComputeList)|List|[Parameters](#ParametersComputeList)|[Example](#ExamplesComputeList)|
+|[az machinelearningservices compute show](#ComputeGet)|Get|[Parameters](#ParametersComputeGet)|[Example](#ExamplesComputeGet)|
+|[az machinelearningservices compute create](#ComputeCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersComputeCreateOrUpdate#Create)|[Example](#ExamplesComputeCreateOrUpdate#Create)|
+|[az machinelearningservices compute update](#ComputeUpdate)|Update|[Parameters](#ParametersComputeUpdate)|[Example](#ExamplesComputeUpdate)|
+|[az machinelearningservices compute delete](#ComputeDelete)|Delete|[Parameters](#ParametersComputeDelete)|[Example](#ExamplesComputeDelete)|
+|[az machinelearningservices compute list-key](#ComputeListKeys)|ListKeys|[Parameters](#ParametersComputeListKeys)|[Example](#ExamplesComputeListKeys)|
+|[az machinelearningservices compute list-node](#ComputeListNodes)|ListNodes|[Parameters](#ParametersComputeListNodes)|[Example](#ExamplesComputeListNodes)|
+|[az machinelearningservices compute restart](#ComputeRestart)|Restart|[Parameters](#ParametersComputeRestart)|[Example](#ExamplesComputeRestart)|
+|[az machinelearningservices compute start](#ComputeStart)|Start|[Parameters](#ParametersComputeStart)|[Example](#ExamplesComputeStart)|
+|[az machinelearningservices compute stop](#ComputeStop)|Stop|[Parameters](#ParametersComputeStop)|[Example](#ExamplesComputeStop)|
+
+### Commands in `az machinelearningservices private-endpoint-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices private-endpoint-connection list](#PrivateEndpointConnectionsList)|List|[Parameters](#ParametersPrivateEndpointConnectionsList)|[Example](#ExamplesPrivateEndpointConnectionsList)|
+|[az machinelearningservices private-endpoint-connection show](#PrivateEndpointConnectionsGet)|Get|[Parameters](#ParametersPrivateEndpointConnectionsGet)|[Example](#ExamplesPrivateEndpointConnectionsGet)|
+|[az machinelearningservices private-endpoint-connection create](#PrivateEndpointConnectionsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersPrivateEndpointConnectionsCreateOrUpdate#Create)|[Example](#ExamplesPrivateEndpointConnectionsCreateOrUpdate#Create)|
+|[az machinelearningservices private-endpoint-connection update](#PrivateEndpointConnectionsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersPrivateEndpointConnectionsCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices private-endpoint-connection delete](#PrivateEndpointConnectionsDelete)|Delete|[Parameters](#ParametersPrivateEndpointConnectionsDelete)|[Example](#ExamplesPrivateEndpointConnectionsDelete)|
+
+### Commands in `az machinelearningservices private-link-resource` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices private-link-resource list](#PrivateLinkResourcesList)|List|[Parameters](#ParametersPrivateLinkResourcesList)|[Example](#ExamplesPrivateLinkResourcesList)|
+
+### Commands in `az machinelearningservices quota` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices quota list](#QuotasList)|List|[Parameters](#ParametersQuotasList)|[Example](#ExamplesQuotasList)|
+|[az machinelearningservices quota update](#QuotasUpdate)|Update|[Parameters](#ParametersQuotasUpdate)|[Example](#ExamplesQuotasUpdate)|
+
+### Commands in `az machinelearningservices usage` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices usage list](#UsagesList)|List|[Parameters](#ParametersUsagesList)|[Example](#ExamplesUsagesList)|
+
+### Commands in `az machinelearningservices virtual-machine-size` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices virtual-machine-size list](#VirtualMachineSizesList)|List|[Parameters](#ParametersVirtualMachineSizesList)|[Example](#ExamplesVirtualMachineSizesList)|
+
+### Commands in `az machinelearningservices workspace` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace list](#WorkspacesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersWorkspacesListByResourceGroup)|[Example](#ExamplesWorkspacesListByResourceGroup)|
+|[az machinelearningservices workspace list](#WorkspacesListBySubscription)|ListBySubscription|[Parameters](#ParametersWorkspacesListBySubscription)|[Example](#ExamplesWorkspacesListBySubscription)|
+|[az machinelearningservices workspace show](#WorkspacesGet)|Get|[Parameters](#ParametersWorkspacesGet)|[Example](#ExamplesWorkspacesGet)|
+|[az machinelearningservices workspace create](#WorkspacesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersWorkspacesCreateOrUpdate#Create)|[Example](#ExamplesWorkspacesCreateOrUpdate#Create)|
+|[az machinelearningservices workspace update](#WorkspacesUpdate)|Update|[Parameters](#ParametersWorkspacesUpdate)|[Example](#ExamplesWorkspacesUpdate)|
+|[az machinelearningservices workspace delete](#WorkspacesDelete)|Delete|[Parameters](#ParametersWorkspacesDelete)|[Example](#ExamplesWorkspacesDelete)|
+|[az machinelearningservices workspace diagnose](#WorkspacesDiagnose)|Diagnose|[Parameters](#ParametersWorkspacesDiagnose)|[Example](#ExamplesWorkspacesDiagnose)|
+|[az machinelearningservices workspace list-key](#WorkspacesListKeys)|ListKeys|[Parameters](#ParametersWorkspacesListKeys)|[Example](#ExamplesWorkspacesListKeys)|
+|[az machinelearningservices workspace list-notebook-access-token](#WorkspacesListNotebookAccessToken)|ListNotebookAccessToken|[Parameters](#ParametersWorkspacesListNotebookAccessToken)|[Example](#ExamplesWorkspacesListNotebookAccessToken)|
+|[az machinelearningservices workspace list-notebook-key](#WorkspacesListNotebookKeys)|ListNotebookKeys|[Parameters](#ParametersWorkspacesListNotebookKeys)|[Example](#ExamplesWorkspacesListNotebookKeys)|
+|[az machinelearningservices workspace list-outbound-network-dependency-endpoint](#WorkspacesListOutboundNetworkDependenciesEndpoints)|ListOutboundNetworkDependenciesEndpoints|[Parameters](#ParametersWorkspacesListOutboundNetworkDependenciesEndpoints)|[Example](#ExamplesWorkspacesListOutboundNetworkDependenciesEndpoints)|
+|[az machinelearningservices workspace list-storage-account-key](#WorkspacesListStorageAccountKeys)|ListStorageAccountKeys|[Parameters](#ParametersWorkspacesListStorageAccountKeys)|[Example](#ExamplesWorkspacesListStorageAccountKeys)|
+|[az machinelearningservices workspace prepare-notebook](#WorkspacesPrepareNotebook)|PrepareNotebook|[Parameters](#ParametersWorkspacesPrepareNotebook)|[Example](#ExamplesWorkspacesPrepareNotebook)|
+|[az machinelearningservices workspace resync-key](#WorkspacesResyncKeys)|ResyncKeys|[Parameters](#ParametersWorkspacesResyncKeys)|[Example](#ExamplesWorkspacesResyncKeys)|
+
+### Commands in `az machinelearningservices workspace-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-connection list](#WorkspaceConnectionsList)|List|[Parameters](#ParametersWorkspaceConnectionsList)|[Example](#ExamplesWorkspaceConnectionsList)|
+|[az machinelearningservices workspace-connection show](#WorkspaceConnectionsGet)|Get|[Parameters](#ParametersWorkspaceConnectionsGet)|[Example](#ExamplesWorkspaceConnectionsGet)|
+|[az machinelearningservices workspace-connection create](#WorkspaceConnectionsCreate)|Create|[Parameters](#ParametersWorkspaceConnectionsCreate)|[Example](#ExamplesWorkspaceConnectionsCreate)|
+|[az machinelearningservices workspace-connection delete](#WorkspaceConnectionsDelete)|Delete|[Parameters](#ParametersWorkspaceConnectionsDelete)|[Example](#ExamplesWorkspaceConnectionsDelete)|
+
+### Commands in `az machinelearningservices workspace-feature` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-feature list](#WorkspaceFeaturesList)|List|[Parameters](#ParametersWorkspaceFeaturesList)|[Example](#ExamplesWorkspaceFeaturesList)|
+
+### Commands in `az machinelearningservices workspace-sku` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-sku list](#WorkspaceSkusList)|List|[Parameters](#ParametersWorkspaceSkusList)|[Example](#ExamplesWorkspaceSkusList)|
+
+
+## COMMAND DETAILS
+
+### group `az machinelearningservices compute`
+#### Command `az machinelearningservices compute list`
+
+##### Example
+```
+az machinelearningservices compute list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices compute show`
+
+##### Example
+```
+az machinelearningservices compute show --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute create`
+
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"description\\":\\"some compute\\",\\"computeType\\":\\"Kubernetes\\",\\"properties\\":{\\"defaultInstanceType\\":\
+\\"defaultInstanceType\\",\\"instanceTypes\\":{\\"defaultInstanceType\\":{\\"nodeSelector\\":null,\\"resources\\":{\\"l\
+imits\\":{\\"cpu\\":\\"1\\",\\"memory\\":\\"4Gi\\",\\"nvidia.com/gpu\\":null},\\"requests\\":{\\"cpu\\":\\"1\\",\\"memo\
+ry\\":\\"4Gi\\",\\"nvidia.com/gpu\\":null}}}},\\"namespace\\":\\"default\\"},\\"resourceId\\":\\"/subscriptions/34adfa4\
+f-cedf-4dc0-ba29-b6d1a69ab345/resourcegroups/testrg123/providers/Microsoft.ContainerService/managedClusters/compute123-\
+56826-c9b00420020b2\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"AmlCompute\\",\\"properties\\":{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osT\
+ype\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"\
+minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/0\
+0000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery\
+/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"DataFactory\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"AKS\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeIns\
+tanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"0\
+0000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\
+\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices compute create --name "compute123" --location "eastus" --properties \
+"{\\"computeType\\":\\"ComputeInstance\\",\\"properties\\":{\\"vmSize\\":\\"STANDARD_NC6\\"}}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--properties**|object|Compute properties|properties|properties|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices compute update`
+
+##### Example
+```
+az machinelearningservices compute update --name "compute123" --scale-settings max-node-count=4 min-node-count=4 \
+node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--scale-settings**|object|scale settings for AML Compute|scale_settings|scaleSettings|
+
+#### Command `az machinelearningservices compute delete`
+
+##### Example
+```
+az machinelearningservices compute delete --name "compute123" --resource-group "testrg123" \
+--underlying-resource-action "Delete" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--underlying-resource-action**|choice|Delete the underlying compute if 'Delete', or detach the underlying compute from workspace if 'Detach'.|underlying_resource_action|underlyingResourceAction|
+
+#### Command `az machinelearningservices compute list-key`
+
+##### Example
+```
+az machinelearningservices compute list-key --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute list-node`
+
+##### Example
+```
+az machinelearningservices compute list-node --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute restart`
+
+##### Example
+```
+az machinelearningservices compute restart --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute start`
+
+##### Example
+```
+az machinelearningservices compute start --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices compute stop`
+
+##### Example
+```
+az machinelearningservices compute stop --name "compute123" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+### group `az machinelearningservices private-endpoint-connection`
+#### Command `az machinelearningservices private-endpoint-connection list`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection list --resource-group "rg-1234" --workspace-name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices private-endpoint-connection show`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" --resource-group \
+"rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+
+#### Command `az machinelearningservices private-endpoint-connection create`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection create --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--private-link-service-connection-state**|object|A collection of information about the state of the connection between service consumer and provider.|private_link_service_connection_state|privateLinkServiceConnectionState|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices private-endpoint-connection update`
+
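+##### Example (hypothetical)
+The service specification provides no example for this command, so the following invocation is
+an illustrative sketch composed from the parameters listed below, mirroring the corresponding
+`create` example:
+```
+az machinelearningservices private-endpoint-connection update --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+```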
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--private-link-service-connection-state**|object|A collection of information about the state of the connection between service consumer and provider.|private_link_service_connection_state|privateLinkServiceConnectionState|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices private-endpoint-connection delete`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+
+### group `az machinelearningservices private-link-resource`
+#### Command `az machinelearningservices private-link-resource list`
+
+##### Example
+```
+az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices quota`
+#### Command `az machinelearningservices quota list`
+
+##### Example
+```
+az machinelearningservices quota list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+#### Command `az machinelearningservices quota update`
+
+##### Example
+```
+az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServices/workspaces/\
+quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningSe\
+rvices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 unit="Count" --value \
+type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/reso\
+urceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standard_DSv2_Family_Cluste\
+r_Dedicated_vCPUs" limit=200 unit="Count"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which the quota update is queried.|location|location|
+|**--value**|array|The list for update quota.|value|value|
+|**--quota-update-parameters-location**|string|Region of workspace quota to be updated.|quota_update_parameters_location|location|
+
+### group `az machinelearningservices usage`
+#### Command `az machinelearningservices usage list`
+
+##### Example
+```
+az machinelearningservices usage list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+### group `az machinelearningservices virtual-machine-size`
+#### Command `az machinelearningservices virtual-machine-size list`
+
+##### Example
+```
+az machinelearningservices virtual-machine-size list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which virtual machine sizes are queried.|location|location|
+
+### group `az machinelearningservices workspace`
+#### Command `az machinelearningservices workspace list`
+
+##### Example
+```
+az machinelearningservices workspace list --resource-group "workspace-1234"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices workspace list`
+
+##### Example
+```
+az machinelearningservices workspace list
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+#### Command `az machinelearningservices workspace show`
+
+##### Example
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace create`
+
+##### Example
+```
+az machinelearningservices workspace create --identity type="SystemAssigned,UserAssigned" \
+userAssignedIdentities={"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mi\
+crosoft.ManagedIdentity/userAssignedIdentities/testuai":{}} --location "eastus2euap" --description "test description" \
+--application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/mic\
+rosoft.insights/components/testinsights" --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/reso\
+urceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" --identity \
+user-assigned-identity="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mic\
+rosoft.ManagedIdentity/userAssignedIdentities/testuai" --key-vault-properties identity-client-id="" \
+key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" \
+key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft\
+.KeyVault/vaults/testkv" --status "Enabled" --friendly-name "HelloName" --hbi-workspace false --key-vault \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/\
+testkv" --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-22\
+22-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/priva\
+teLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" --storage-account \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storag\
+eAccounts/testStorageAccount" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace. This name is mutable.|friendly_name|friendlyName|
+|**--key-vault**|string|ARM id of the key vault associated with this workspace. This cannot be changed once the workspace has been created|key_vault|keyVault|
+|**--application-insights**|string|ARM id of the application insights associated with this workspace. This cannot be changed once the workspace has been created|application_insights|applicationInsights|
+|**--container-registry**|string|ARM id of the container registry associated with this workspace. This cannot be changed once the workspace has been created|container_registry|containerRegistry|
+|**--storage-account**|string|ARM id of the storage account associated with this workspace. This cannot be changed once the workspace has been created|storage_account|storageAccount|
+|**--discovery-url**|string|Url for the discovery service to identify regional endpoints for machine learning experimentation services|discovery_url|discoveryUrl|
+|**--hbi-workspace**|boolean|The flag to signal HBI data in the workspace and reduce diagnostic data collected by the service|hbi_workspace|hbiWorkspace|
+|**--image-build-compute**|string|The compute name for image build|image_build_compute|imageBuildCompute|
+|**--allow-public-access-when-behind-vnet**|boolean|The flag to indicate whether to allow public access when behind VNet.|allow_public_access_when_behind_vnet|allowPublicAccessWhenBehindVnet|
+|**--public-network-access**|choice|Whether requests from Public Network are allowed.|public_network_access|publicNetworkAccess|
+|**--shared-private-link-resources**|array|The list of shared private link resources in this workspace.|shared_private_link_resources|sharedPrivateLinkResources|
+|**--primary-user-assigned-identity**|string|The user assigned identity resource id that represents the workspace identity.|primary_user_assigned_identity|primaryUserAssignedIdentity|
+|**--collections-throughput**|integer|The throughput of the collections in the Cosmos DB database.|collections_throughput|collectionsThroughput|
+|**--status**|choice|Indicates whether or not the encryption is enabled for the workspace.|status|status|
+|**--identity**|object|The identity that will be used to access the key vault for encryption at rest.|identity|identity|
+|**--key-vault-properties**|object|Customer Key vault properties.|key_vault_properties|keyVaultProperties|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+
+#### Command `az machinelearningservices workspace update`
+
+##### Example
+```
+az machinelearningservices workspace update --description "new description" --friendly-name "New friendly name" \
+--public-network-access "Disabled" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--tags**|dictionary|The resource tags for the machine learning workspace.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace.|friendly_name|friendlyName|
+|**--image-build-compute**|string|The compute name for image build|image_build_compute|imageBuildCompute|
+|**--primary-user-assigned-identity**|string|The user assigned identity resource id that represents the workspace identity.|primary_user_assigned_identity|primaryUserAssignedIdentity|
+|**--public-network-access**|choice|Whether requests from Public Network are allowed.|public_network_access|publicNetworkAccess|
+|**--collections-throughput**|integer|The throughput of the collections in the Cosmos DB database.|collections_throughput|collectionsThroughput|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
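+
+Tags can also be changed through `update`. A minimal sketch, assuming the generated `--tags` option accepts space-separated key=value pairs, as is typical for dictionary arguments in generated extensions:
+```
+az machinelearningservices workspace update --tags team="ml" env="test" --resource-group "workspace-1234" \
+--name "testworkspace"
+```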
+
+#### Command `az machinelearningservices workspace delete`
+
+##### Example
+```
+az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace diagnose`
+
+##### Example
+```
+az machinelearningservices workspace diagnose --application-insights "{}" --container-registry "{}" --dns-resolution \
+"{}" --key-vault "{}" --nsg "{}" --others "{}" --resource-lock "{}" --storage-account "{}" --udr "{}" --resource-group \
+"workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--udr**|dictionary|Setting for diagnosing user defined routing|udr|udr|
+|**--nsg**|dictionary|Setting for diagnosing network security group|nsg|nsg|
+|**--resource-lock**|dictionary|Setting for diagnosing resource lock|resource_lock|resourceLock|
+|**--dns-resolution**|dictionary|Setting for diagnosing DNS resolution|dns_resolution|dnsResolution|
+|**--storage-account**|dictionary|Setting for diagnosing dependent storage account|storage_account|storageAccount|
+|**--key-vault**|dictionary|Setting for diagnosing dependent key vault|key_vault|keyVault|
+|**--container-registry**|dictionary|Setting for diagnosing dependent container registry|container_registry|containerRegistry|
+|**--application-insights**|dictionary|Setting for diagnosing dependent application insights|application_insights|applicationInsights|
+|**--others**|dictionary|Setting for diagnosing unclassified category of problems|others|others|
+
+#### Command `az machinelearningservices workspace list-key`
+
+##### Example
+```
+az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-notebook-access-token`
+
+##### Example
+```
+az machinelearningservices workspace list-notebook-access-token --resource-group "workspace-1234" --name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-notebook-key`
+
+##### Example
+```
+az machinelearningservices workspace list-notebook-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-outbound-network-dependency-endpoint`
+
+##### Example
+```
+az machinelearningservices workspace list-outbound-network-dependency-endpoint --resource-group "workspace-1234" \
+--name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-storage-account-key`
+
+##### Example
+```
+az machinelearningservices workspace list-storage-account-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace prepare-notebook`
+
+##### Example
+```
+az machinelearningservices workspace prepare-notebook --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace resync-key`
+
+##### Example
+```
+az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices workspace-connection`
+#### Command `az machinelearningservices workspace-connection list`
+
+##### Example
+```
+az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" --target \
+"www.facebook.com" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--target**|string|Target of the workspace connection.|target|target|
+|**--category**|string|Category of the workspace connection.|category|category|
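+
+Assuming `--target` and `--category` are optional filters, all connections of a workspace can also be listed without them:
+```
+az machinelearningservices workspace-connection list --resource-group "resourceGroup-1" --workspace-name \
+"workspace-1"
+```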
+
+#### Command `az machinelearningservices workspace-connection show`
+
+##### Example
+```
+az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+
+#### Command `az machinelearningservices workspace-connection create`
+
+##### Example
+```
+az machinelearningservices workspace-connection create --connection-name "connection-1" --auth-type "PAT" --category \
+"ACR" --target "www.facebook.com" --value "secrets" --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+|**--category**|string|Category of the workspace connection.|category|category|
+|**--target**|string|Target of the workspace connection.|target|target|
+|**--auth-type**|string|Authorization type of the workspace connection.|auth_type|authType|
+|**--value**|string|Value details of the workspace connection.|value|value|
+
+#### Command `az machinelearningservices workspace-connection delete`
+
+##### Example
+```
+az machinelearningservices workspace-connection delete --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+
+### group `az machinelearningservices workspace-feature`
+#### Command `az machinelearningservices workspace-feature list`
+
+##### Example
+```
+az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices workspace-sku`
+#### Command `az machinelearningservices workspace-sku list`
+
+##### Example
+```
+az machinelearningservices workspace-sku list
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
\ No newline at end of file
diff --git a/src/machinelearningservices/setup.cfg b/src/machinelearningservices/setup.cfg
new file mode 100644
index 00000000000..2fdd96e5d39
--- /dev/null
+++ b/src/machinelearningservices/setup.cfg
@@ -0,0 +1 @@
+#setup.cfg
\ No newline at end of file
diff --git a/src/machinelearningservices/setup.py b/src/machinelearningservices/setup.py
new file mode 100644
index 00000000000..e4ec7166802
--- /dev/null
+++ b/src/machinelearningservices/setup.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+
+from codecs import open
+from setuptools import setup, find_packages
+
+# HISTORY.rst entry.
+VERSION = '0.1.0'
+try:
+ from azext_machinelearningservices.manual.version import VERSION
+except ImportError:
+ pass
+
+# The full list of classifiers is available at
+# https://pypi.python.org/pypi?%3Aaction=list_classifiers
+CLASSIFIERS = [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: System Administrators',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'License :: OSI Approved :: MIT License',
+]
+
+DEPENDENCIES = []
+
+try:
+ from azext_machinelearningservices.manual.dependency import DEPENDENCIES
+except ImportError:
+ pass
+
+with open('README.md', 'r', encoding='utf-8') as f:
+ README = f.read()
+with open('HISTORY.rst', 'r', encoding='utf-8') as f:
+ HISTORY = f.read()
+
+setup(
+ name='machinelearningservices',
+ version=VERSION,
+ description='Microsoft Azure Command-Line Tools AzureMachineLearningWorkspaces Extension',
+ author='Microsoft Corporation',
+ author_email='azpycli@microsoft.com',
+ url='https://github.com/Azure/azure-cli-extensions/tree/master/src/machinelearningservices',
+ long_description=README + '\n\n' + HISTORY,
+ license='MIT',
+ classifiers=CLASSIFIERS,
+ packages=find_packages(),
+ install_requires=DEPENDENCIES,
+ package_data={'azext_machinelearningservices': ['azext_metadata.json']},
+)