diff --git a/src/machinelearningservices/HISTORY.rst b/src/machinelearningservices/HISTORY.rst
new file mode 100644
index 00000000000..1c139576ba0
--- /dev/null
+++ b/src/machinelearningservices/HISTORY.rst
@@ -0,0 +1,8 @@
+.. :changelog:
+
+Release History
+===============
+
+0.1.0
+++++++
+* Initial release.
diff --git a/src/machinelearningservices/README.md b/src/machinelearningservices/README.md
new file mode 100644
index 00000000000..a6e7c0fb957
--- /dev/null
+++ b/src/machinelearningservices/README.md
@@ -0,0 +1,470 @@
+# Azure CLI machinelearningservices Extension #
+This is the Azure CLI extension for machinelearningservices.
+
+### How to use ###
+Install this extension using the CLI command below:
+```
+az extension add --name machinelearningservices
+```
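+
+If you later need to confirm, update, or remove the extension, the generic `az extension` commands can be used (a
+minimal sketch; only the extension name above is taken from this repository):
+```
+# confirm the extension is installed
+az extension list --output table
+# update to the latest published version
+az extension update --name machinelearningservices
+# remove the extension
+az extension remove --name machinelearningservices
+```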
+
+### Included Features ###
+#### machinelearningservices workspace ####
+##### Create #####
+```
+az machinelearningservices workspace create \
+ --identity type="SystemAssigned,UserAssigned" userAssignedIdentities={"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/testuai":{}} \
+ --location "eastus2euap" --description "test description" \
+ --application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/microsoft.insights/components/testinsights" \
+ --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" \
+ --identity user-assigned-identity="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentities/testuai" \
+ --key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" \
+ --status "Enabled" --friendly-name "HelloName" --hbi-workspace false \
+ --key-vault "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/testkv" \
+ --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" \
+ --storage-account "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storageAccounts/testStorageAccount" \
+ --resource-group "workspace-1234" --name "testworkspace"
+
+az machinelearningservices workspace wait --created --resource-group "{rg}" --name "{myWorkspace}"
+```
+##### Show #####
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+```
+##### List #####
+```
+az machinelearningservices workspace list --resource-group "workspace-1234"
+```
+##### Update #####
+```
+az machinelearningservices workspace update --description "new description" --friendly-name "New friendly name" \
+ --resource-group "workspace-1234" --name "testworkspace"
+```
+##### List-key #####
+```
+az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+```
+##### List-notebook-access-token #####
+```
+az machinelearningservices workspace list-notebook-access-token --resource-group "workspace-1234" \
+ --name "testworkspace"
+```
+##### Resync-key #####
+```
+az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+```
+#### machinelearningservices workspace-feature ####
+##### List #####
+```
+az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+```
+#### machinelearningservices usage ####
+##### List #####
+```
+az machinelearningservices usage list --location "eastus"
+```
+#### machinelearningservices virtual-machine-size ####
+##### List #####
+```
+az machinelearningservices virtual-machine-size list --location "eastus"
+```
+#### machinelearningservices quota ####
+##### List #####
+```
+az machinelearningservices quota list --location "eastus"
+```
+##### Update #####
+```
+az machinelearningservices quota update --location "eastus" \
+ --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 unit="Count" \
+ --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=200 unit="Count"
+```
+#### machinelearningservices machine-learning-compute ####
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --ak-s-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --ak-s-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aks create #####
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+ --ak-s-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --aml-compute-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --aml-compute-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Aml-compute create #####
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+ --aml-compute-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" \
+ --compute-instance-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" \
+ --compute-instance-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Compute-instance create #####
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+ --location "eastus" --compute-instance-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-factory create #####
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Data-lake-analytics create #####
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Databricks create #####
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Hd-insight create #####
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Synapse-spark create #####
+```
+az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Synapse-spark create #####
+```
+az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" \
+ --location "eastus" \
+ --synapse-spark-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Synapse-spark create #####
+```
+az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Synapse-spark create #####
+```
+az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" \
+ --location "eastus" \
+ --synapse-spark-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Synapse-spark create #####
+```
+az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" \
+ --location "eastus" --synapse-spark-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" \
+ --virtual-machine-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" \
+ --virtual-machine-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Virtual-machine create #####
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+ --location "eastus" --virtual-machine-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### List #####
+```
+az machinelearningservices machine-learning-compute list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Update #####
+```
+az machinelearningservices machine-learning-compute update --compute-name "compute123" \
+ --scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" \
+ --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### List-key #####
+```
+az machinelearningservices machine-learning-compute list-key --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### List-node #####
+```
+az machinelearningservices machine-learning-compute list-node --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Restart #####
+```
+az machinelearningservices machine-learning-compute restart --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Start #####
+```
+az machinelearningservices machine-learning-compute start --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Stop #####
+```
+az machinelearningservices machine-learning-compute stop --compute-name "compute123" --resource-group "testrg123" \
+ --workspace-name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group "testrg123" \
+ --underlying-resource-action "Delete" --workspace-name "workspaces123"
+```
+#### machinelearningservices workspace ####
+##### List-sku #####
+```
+az machinelearningservices workspace list-sku
+```
+#### machinelearningservices private-endpoint-connection ####
+##### Put #####
+```
+az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \
+ --private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+ --workspace-name "testworkspace"
+```
+##### Show #####
+```
+az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \
+ --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Delete #####
+```
+az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+ --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices private-link-resource ####
+##### List #####
+```
+az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+#### machinelearningservices machine-learning-service ####
+##### Create #####
+```
+az machinelearningservices machine-learning-service create \
+ --properties "{\\"appInsightsEnabled\\":true,\\"authEnabled\\":true,\\"computeType\\":\\"ACI\\",\\"containerResourceRequirements\\":{\\"cpu\\":1,\\"memoryInGB\\":1},\\"environmentImageRequest\\":{\\"assets\\":[{\\"id\\":null,\\"mimeType\\":\\"application/x-python\\",\\"unpack\\":false,\\"url\\":\\"aml://storage/azureml/score.py\\"}],\\"driverProgram\\":\\"score.py\\",\\"environment\\":{\\"name\\":\\"AzureML-Scikit-learn-0.20.3\\",\\"docker\\":{\\"baseDockerfile\\":null,\\"baseImage\\":\\"mcr.microsoft.com/azureml/base:openmpi3.1.2-ubuntu16.04\\",\\"baseImageRegistry\\":{\\"address\\":null,\\"password\\":null,\\"username\\":null}},\\"environmentVariables\\":{\\"EXAMPLE_ENV_VAR\\":\\"EXAMPLE_VALUE\\"},\\"inferencingStackVersion\\":null,\\"python\\":{\\"baseCondaEnvironment\\":null,\\"condaDependencies\\":{\\"name\\":\\"azureml_ae1acbe6e1e6aabbad900b53c491a17c\\",\\"channels\\":[\\"conda-forge\\"],\\"dependencies\\":[\\"python=3.6.2\\",{\\"pip\\":[\\"azureml-core==1.0.69\\",\\"azureml-defaults==1.0.69\\",\\"azureml-telemetry==1.0.69\\",\\"azureml-train-restclients-hyperdrive==1.0.69\\",\\"azureml-train-core==1.0.69\\",\\"scikit-learn==0.20.3\\",\\"scipy==1.2.1\\",\\"numpy==1.16.2\\",\\"joblib==0.13.2\\"]}]},\\"interpreterPath\\":\\"python\\",\\"userManagedDependencies\\":false},\\"spark\\":{\\"packages\\":[],\\"precachePackages\\":true,\\"repositories\\":[]},\\"version\\":\\"3\\"},\\"models\\":[{\\"name\\":\\"sklearn_regression_model.pkl\\",\\"mimeType\\":\\"application/x-python\\",\\"url\\":\\"aml://storage/azureml/sklearn_regression_model.pkl\\"}]},\\"location\\":\\"eastus2\\"}" \
+ --resource-group "testrg123" --service-name "service456" --workspace-name "workspaces123"
+```
+##### Show #####
+```
+az machinelearningservices machine-learning-service show --resource-group "testrg123" --service-name "service123" \
+ --workspace-name "workspaces123"
+```
+##### List #####
+```
+az machinelearningservices machine-learning-service list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Delete #####
+```
+az machinelearningservices machine-learning-service delete --resource-group "testrg123" --service-name "service123" \
+ --workspace-name "workspaces123"
+```
+#### machinelearningservices notebook ####
+##### List-key #####
+```
+az machinelearningservices notebook list-key --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Prepare #####
+```
+az machinelearningservices notebook prepare --resource-group "testrg123" --workspace-name "workspaces123"
+```
+#### machinelearningservices storage-account ####
+##### List-key #####
+```
+az machinelearningservices storage-account list-key --resource-group "testrg123" --workspace-name "workspaces123"
+```
+#### machinelearningservices workspace-connection ####
+##### Create #####
+```
+az machinelearningservices workspace-connection create --connection-name "connection-1" --name "connection-1" \
+ --auth-type "PAT" --category "ACR" --target "www.facebook.com" --value "secrets" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Show #####
+```
+az machinelearningservices workspace-connection show --connection-name "connection-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
+##### List #####
+```
+az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" \
+ --target "www.facebook.com" --workspace-name "workspace-1"
+```
+##### Delete #####
+```
+az machinelearningservices workspace-connection delete --connection-name "connection-1" \
+ --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+```
\ No newline at end of file
diff --git a/src/machinelearningservices/azext_machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/__init__.py
new file mode 100644
index 00000000000..b234b2a3aa6
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/__init__.py
@@ -0,0 +1,50 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+from azure.cli.core import AzCommandsLoader
+from azext_machinelearningservices.generated._help import helps # pylint: disable=unused-import
+try:
+ from azext_machinelearningservices.manual._help import helps # pylint: disable=reimported
+except ImportError:
+ pass
+
+
+class AzureMachineLearningWorkspacesCommandsLoader(AzCommandsLoader):
+
+ def __init__(self, cli_ctx=None):
+ from azure.cli.core.commands import CliCommandType
+ from azext_machinelearningservices.generated._client_factory import cf_machinelearningservices_cl
+ machinelearningservices_custom = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.custom#{}',
+ client_factory=cf_machinelearningservices_cl)
+ parent = super(AzureMachineLearningWorkspacesCommandsLoader, self)
+ parent.__init__(cli_ctx=cli_ctx, custom_command_type=machinelearningservices_custom)
+
+ def load_command_table(self, args):
+ from azext_machinelearningservices.generated.commands import load_command_table
+ load_command_table(self, args)
+ try:
+ from azext_machinelearningservices.manual.commands import load_command_table as load_command_table_manual
+ load_command_table_manual(self, args)
+ except ImportError:
+ pass
+ return self.command_table
+
+ def load_arguments(self, command):
+ from azext_machinelearningservices.generated._params import load_arguments
+ load_arguments(self, command)
+ try:
+ from azext_machinelearningservices.manual._params import load_arguments as load_arguments_manual
+ load_arguments_manual(self, command)
+ except ImportError:
+ pass
+
+
+COMMAND_LOADER_CLS = AzureMachineLearningWorkspacesCommandsLoader
diff --git a/src/machinelearningservices/azext_machinelearningservices/action.py b/src/machinelearningservices/azext_machinelearningservices/action.py
new file mode 100644
index 00000000000..d95d53bf711
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/action.py
@@ -0,0 +1,17 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.action import * # noqa: F403
+try:
+ from .manual.action import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json
new file mode 100644
index 00000000000..cfc30c747c7
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json
@@ -0,0 +1,4 @@
+{
+ "azext.isExperimental": true,
+ "azext.minCliCoreVersion": "2.15.0"
+}
\ No newline at end of file
diff --git a/src/machinelearningservices/azext_machinelearningservices/custom.py b/src/machinelearningservices/azext_machinelearningservices/custom.py
new file mode 100644
index 00000000000..dbe9d5f9742
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/custom.py
@@ -0,0 +1,17 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=wildcard-import
+# pylint: disable=unused-wildcard-import
+
+from .generated.custom import * # noqa: F403
+try:
+ from .manual.custom import * # noqa: F403
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py
new file mode 100644
index 00000000000..3334fc1b89f
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py
@@ -0,0 +1,68 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+def cf_machinelearningservices_cl(cli_ctx, *_):
+ from azure.cli.core.commands.client_factory import get_mgmt_service_client
+ from azext_machinelearningservices.vendored_sdks.machinelearningservices import AzureMachineLearningWorkspaces
+ return get_mgmt_service_client(cli_ctx,
+ AzureMachineLearningWorkspaces)
+
+
+def cf_workspace(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspaces
+
+
+def cf_workspace_feature(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_features
+
+
+def cf_usage(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).usages
+
+
+def cf_virtual_machine_size(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).virtual_machine_sizes
+
+
+def cf_quota(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).quotas
+
+
+def cf_machine_learning_compute(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).machine_learning_compute
+
+
+def cf_workspace(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace
+
+
+def cf_private_endpoint_connection(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).private_endpoint_connections
+
+
+def cf_private_link_resource(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).private_link_resources
+
+
+def cf_machine_learning_service(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).machine_learning_service
+
+
+def cf_notebook(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).notebooks
+
+
+def cf_storage_account(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).storage_account
+
+
+def cf_workspace_connection(cli_ctx, *_):
+ return cf_machinelearningservices_cl(cli_ctx).workspace_connections
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_help.py b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py
new file mode 100644
index 00000000000..c044d9d1495
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py
@@ -0,0 +1,1066 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+
+from knack.help_files import helps
+
+
+helps['machinelearningservices workspace'] = """
+ type: group
+ short-summary: Manage workspace with machinelearningservices
+"""
+
+helps['machinelearningservices workspace list'] = """
+ type: command
+ short-summary: "Lists all the available machine learning workspaces under the specified resource group. And Lists \
+all the available machine learning workspaces under the specified subscription."
+ examples:
+ - name: Get Workspaces by Resource Group
+ text: |-
+ az machinelearningservices workspace list --resource-group "workspace-1234"
+ - name: Get Workspaces by subscription
+ text: |-
+ az machinelearningservices workspace list
+"""
+
+helps['machinelearningservices workspace show'] = """
+ type: command
+ short-summary: "Gets the properties of the specified machine learning workspace."
+ examples:
+ - name: Get Workspace
+ text: |-
+ az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace create'] = """
+ type: command
+ short-summary: "Create a workspace with the specified parameters."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --shared-private-link-resources
+ short-summary: "The list of shared private link resources in this workspace."
+ long-summary: |
+ Usage: --shared-private-link-resources name=XX private-link-resource-id=XX group-id=XX request-message=XX \
+status=XX
+
+ name: Unique name of the private link.
+ private-link-resource-id: The resource id that private link links to.
+ group-id: The private link resource group id.
+ request-message: Request message.
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+
+ Multiple actions can be specified by using more than one --shared-private-link-resources argument.
+ - name: --identity
+ short-summary: "The identity that will be used to access the key vault for encryption at rest."
+ long-summary: |
+ Usage: --identity user-assigned-identity=XX
+
+ user-assigned-identity: The ArmId of the user assigned identity that will be used to access the customer \
+managed key vault
+ - name: --key-vault-properties
+ short-summary: "Customer Key vault properties."
+ long-summary: |
+ Usage: --key-vault-properties key-vault-arm-id=XX key-identifier=XX identity-client-id=XX
+
+ key-vault-arm-id: Required. The ArmId of the keyVault where the customer owned encryption key is present.
+ key-identifier: Required. Key vault uri to access the encryption key.
+ identity-client-id: For future use - The client id of the identity which will be used to access key vault.
+ examples:
+ - name: Create Workspace
+ text: |-
+ az machinelearningservices workspace create --identity type="SystemAssigned,UserAssigned" \
+userAssignedIdentities={"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mi\
+crosoft.ManagedIdentity/userAssignedIdentities/testuai":{}} --location "eastus2euap" --description "test description" \
+--application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/mic\
+rosoft.insights/components/testinsights" --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/reso\
+urceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" --identity \
+user-assigned-identity="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mic\
+rosoft.ManagedIdentity/userAssignedIdentities/testuai" --key-vault-properties identity-client-id="" \
+key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" \
+key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft\
+.KeyVault/vaults/testkv" --status "Enabled" --friendly-name "HelloName" --hbi-workspace false --key-vault \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/\
+testkv" --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-22\
+22-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/priva\
+teLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" --storage-account \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storag\
+eAccounts/testStorageAccount" --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace update'] = """
+ type: command
+ short-summary: "Updates a machine learning workspace with the specified parameters."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Update Workspace
+ text: |-
+ az machinelearningservices workspace update --description "new description" --friendly-name "New \
+friendly name" --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace delete'] = """
+ type: command
+ short-summary: "Deletes a machine learning workspace."
+ examples:
+ - name: Delete Workspace
+ text: |-
+ az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+"""
+
+helps['machinelearningservices workspace list-key'] = """
+ type: command
+ short-summary: "Lists all the keys associated with this workspace. This includes keys for the storage account, app \
+insights and password for container registry."
+ examples:
+ - name: List Workspace Keys
+ text: |-
+ az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+"""
+
+helps['machinelearningservices workspace list-notebook-access-token'] = """
+ type: command
+ short-summary: "return notebook access token and refresh token."
+ examples:
+ - name: List Workspace Keys
+ text: |-
+ az machinelearningservices workspace list-notebook-access-token --resource-group "workspace-1234" \
+--name "testworkspace"
+"""
+
+helps['machinelearningservices workspace resync-key'] = """
+ type: command
+ short-summary: "Resync all the keys associated with this workspace. This includes keys for the storage account, \
+app insights and password for container registry."
+ examples:
+ - name: Resync Workspace Keys
+ text: |-
+ az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+"""
+
+helps['machinelearningservices workspace wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices workspace is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully \
+created.
+ text: |-
+ az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" \
+--created
+ - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully \
+deleted.
+ text: |-
+ az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" \
+--deleted
+"""
+
+helps['machinelearningservices workspace-feature'] = """
+ type: group
+ short-summary: Manage workspace feature with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-feature list'] = """
+ type: command
+ short-summary: "Lists all enabled features for a workspace."
+ examples:
+ - name: List Workspace features
+ text: |-
+ az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices usage'] = """
+ type: group
+ short-summary: Manage usage with machinelearningservices
+"""
+
+helps['machinelearningservices usage list'] = """
+ type: command
+ short-summary: "Gets the current usage information as well as limits for AML resources for given subscription and \
+location."
+ examples:
+ - name: List Usages
+ text: |-
+ az machinelearningservices usage list --location "eastus"
+"""
+
+helps['machinelearningservices virtual-machine-size'] = """
+ type: group
+ short-summary: Manage virtual machine size with machinelearningservices
+"""
+
+helps['machinelearningservices virtual-machine-size list'] = """
+ type: command
+ short-summary: "Returns supported VM Sizes in a location."
+ examples:
+ - name: List VM Sizes
+ text: |-
+ az machinelearningservices virtual-machine-size list --location "eastus"
+"""
+
+helps['machinelearningservices quota'] = """
+ type: group
+ short-summary: Manage quota with machinelearningservices
+"""
+
+helps['machinelearningservices quota list'] = """
+ type: command
+ short-summary: "Gets the currently assigned Workspace Quotas based on VMFamily."
+ examples:
+ - name: List workspace quotas by VMFamily
+ text: |-
+ az machinelearningservices quota list --location "eastus"
+"""
+
+helps['machinelearningservices quota update'] = """
+ type: command
+ short-summary: "Update quota for each VM family in workspace."
+ parameters:
+ - name: --value
+ short-summary: "The list for update quota."
+ long-summary: |
+ Usage: --value id=XX type=XX limit=XX unit=XX
+
+ id: Specifies the resource ID.
+ type: Specifies the resource type.
+ limit: The maximum permitted quota of the resource.
+ unit: An enum describing the unit of quota measurement.
+
+ Multiple actions can be specified by using more than one --value argument.
+ examples:
+      - name: Update quotas
+ text: |-
+ az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServi\
+ces/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.Ma\
+chineLearningServices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 \
+unit="Count" --value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0\
+000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standa\
+rd_DSv2_Family_Cluster_Dedicated_vCPUs" limit=200 unit="Count"
+"""
+
+helps['machinelearningservices machine-learning-compute'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices
+"""
+
+helps['machinelearningservices machine-learning-compute list'] = """
+ type: command
+ short-summary: "Gets computes in specified workspace."
+ examples:
+ - name: Get Computes
+ text: |-
+ az machinelearningservices machine-learning-compute list --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute show'] = """
+ type: command
+ short-summary: "Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are not \
+returned - use 'keys' nested resource to get them."
+ examples:
+      - name: Get an AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+      - name: Get an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+      - name: Get a ComputeInstance
+ text: |-
+ az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute aks'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group aks
+"""
+
+helps['machinelearningservices machine-learning-compute aks create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --ak-s-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"re\
+moteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeI\
+dleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000\
+000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/\
+versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --ak-s-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"pe\
+rsonal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-00000000\
+0000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disable\
+d\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location \
+"eastus" --ak-s-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute aml-compute'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group aml-compute
+"""
+
+helps['machinelearningservices machine-learning-compute aml-compute create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --aml-compute-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\
+\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNode\
+Count\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000\
+-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images\
+/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --aml-compute-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthoriz\
+ationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-\
+0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAc\
+cess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \
+--location "eastus" --aml-compute-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute compute-instance'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group compute-instance
+"""
+
+helps['machinelearningservices machine-learning-compute compute-instance create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --compute-instance-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\
+\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"min\
+NodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/0000\
+0000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/im\
+ages/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --compute-instance-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAut\
+horizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-\
+0000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPub\
+licAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \
+--location "eastus" --compute-instance-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute data-factory'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group data-factory
+"""
+
+helps['machinelearningservices machine-learning-compute data-factory create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute data-lake-analytics'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group data-lake-analytics
+"""
+
+helps['machinelearningservices machine-learning-compute data-lake-analytics create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \
+"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute databricks'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group databricks
+"""
+
+helps['machinelearningservices machine-learning-compute databricks create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute hd-insight'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group hd-insight
+"""
+
+helps['machinelearningservices machine-learning-compute hd-insight create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --administrator-account
+ short-summary: "Admin credentials for master node of the cluster"
+ long-summary: |
+ Usage: --administrator-account username=XX password=XX public-key-data=XX private-key-data=XX
+
+ username: Username of admin account
+ password: Password of admin account
+ public-key-data: Public key data
+ private-key-data: Private key data
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute synapse-spark'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group synapse-spark
+"""
+
+helps['machinelearningservices machine-learning-compute synapse-spark create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" \
+--location "eastus" --synapse-spark-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\"\
+:\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNod\
+eCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/0000000\
+0-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/image\
+s/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" \
+--location "eastus" --synapse-spark-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthor\
+izationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-000\
+0-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublic\
+Access\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" \
+--location "eastus" --synapse-spark-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute virtual-machine'] = """
+ type: group
+ short-summary: Manage machine learning compute with machinelearningservices sub group virtual-machine
+"""
+
+helps['machinelearningservices machine-learning-compute virtual-machine create'] = """
+ type: command
+ short-summary: "Create compute. This call will overwrite a compute if it exists. This is a nonrecoverable \
+operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ examples:
+ - name: Create AKS Compute
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create an AML Compute
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --virtual-machine-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\
+\\":\\"Windows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"min\
+NodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/0000\
+0000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/im\
+ages/myImageDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a DataFactory Compute
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --virtual-machine-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuth\
+orizationType\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0\
+000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPubl\
+icAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+ - name: Create a ComputeInstance Compute with minimal inputs
+ text: |-
+ az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --virtual-machine-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute update'] = """
+ type: command
+ short-summary: "Updates properties of a compute. This call will overwrite a compute if it exists. This is a \
+nonrecoverable operation."
+ parameters:
+ - name: --scale-settings
+ short-summary: "Desired scale settings for the amlCompute."
+ long-summary: |
+ Usage: --scale-settings max-node-count=XX min-node-count=XX node-idle-time-before-scale-down=XX
+
+ max-node-count: Required. Max number of nodes to use
+ min-node-count: Min number of nodes to use
+ node-idle-time-before-scale-down: Node idle time before scaling down amlCompute. This string needs to be \
+an ISO 8601 duration (for example, PT5M).
+ examples:
+ - name: Update an AmlCompute Compute
+ text: |-
+ az machinelearningservices machine-learning-compute update --compute-name "compute123" --scale-settings \
+max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute delete'] = """
+ type: command
+ short-summary: "Deletes specified Machine Learning compute."
+ examples:
+ - name: Delete Compute
+ text: |-
+ az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group \
+"testrg123" --underlying-resource-action "Delete" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute list-key'] = """
+ type: command
+ short-summary: "Gets secrets related to Machine Learning compute (storage keys, service credentials, etc)."
+ examples:
+ - name: List AKS Compute Keys
+ text: |-
+ az machinelearningservices machine-learning-compute list-key --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute list-node'] = """
+ type: command
+ short-summary: "Get the details (e.g IP address, port etc) of all the compute nodes in the compute."
+ examples:
+ - name: Get compute nodes information for a compute
+ text: |-
+ az machinelearningservices machine-learning-compute list-node --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute restart'] = """
+ type: command
+ short-summary: "Posts a restart action to a compute instance."
+ examples:
+ - name: Restart ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute restart --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute start'] = """
+ type: command
+ short-summary: "Posts a start action to a compute instance."
+ examples:
+ - name: Start ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute start --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute stop'] = """
+ type: command
+ short-summary: "Posts a stop action to a compute instance."
+ examples:
+ - name: Stop ComputeInstance Compute
+ text: |-
+ az machinelearningservices machine-learning-compute stop --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices \
+machine-learning-compute is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully created.
+ text: |-
+ az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully updated.
+ text: |-
+ az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123" --updated
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully deleted.
+ text: |-
+ az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123" --deleted
+"""
+
+helps['machinelearningservices workspace'] = """
+ type: group
+ short-summary: Manage workspace with machinelearningservices
+"""
+
+helps['machinelearningservices workspace list-sku'] = """
+ type: command
+ short-summary: "Lists all skus with associated features."
+ examples:
+ - name: List Skus
+ text: |-
+ az machinelearningservices workspace list-sku
+"""
+
+helps['machinelearningservices private-endpoint-connection'] = """
+ type: group
+ short-summary: Manage private endpoint connection with machinelearningservices
+"""
+
+helps['machinelearningservices private-endpoint-connection show'] = """
+ type: command
+ short-summary: "Gets the specified private endpoint connection associated with the workspace."
+ examples:
+ - name: WorkspaceGetPrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection delete'] = """
+ type: command
+ short-summary: "Deletes the specified private endpoint connection associated with the workspace."
+ examples:
+ - name: WorkspaceDeletePrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-endpoint-connection put'] = """
+ type: command
+ short-summary: "Update the state of specified private endpoint connection associated with the workspace."
+ parameters:
+ - name: --sku
+ short-summary: "The sku of the workspace."
+ long-summary: |
+ Usage: --sku name=XX tier=XX
+
+ name: Name of the sku
+ tier: Tier of the sku like Basic or Enterprise
+ - name: --private-link-service-connection-state
+ short-summary: "A collection of information about the state of the connection between service consumer and \
+provider."
+ long-summary: |
+ Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX
+
+ status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
+ description: The reason for approval/rejection of the connection.
+ actions-required: A message indicating if changes on the service provider require any updates on the \
+consumer.
+ examples:
+ - name: WorkspacePutPrivateEndpointConnection
+ text: |-
+ az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices private-link-resource'] = """
+ type: group
+ short-summary: Manage private link resource with machinelearningservices
+"""
+
+helps['machinelearningservices private-link-resource list'] = """
+ type: command
+ short-summary: "Gets the private link resources that need to be created for a workspace."
+ examples:
+ - name: WorkspaceListPrivateLinkResources
+ text: |-
+ az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name \
+"testworkspace"
+"""
+
+helps['machinelearningservices machine-learning-service'] = """
+ type: group
+ short-summary: Manage machine learning service with machinelearningservices
+"""
+
+helps['machinelearningservices machine-learning-service list'] = """
+ type: command
+ short-summary: "Gets services in specified workspace."
+ examples:
+ - name: Get Services
+ text: |-
+ az machinelearningservices machine-learning-service list --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-service show'] = """
+ type: command
+ short-summary: "Get a Service by name."
+ examples:
+ - name: Get Service
+ text: |-
+ az machinelearningservices machine-learning-service show --resource-group "testrg123" --service-name \
+"service123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-service create'] = """
+ type: command
+ short-summary: "Create service. This call will update a service if it exists. This is a nonrecoverable operation. \
+If your intent is to create a new service, do a GET first to verify that it does not exist yet."
+ examples:
+ - name: Create Or Update service
+ text: |-
+ az machinelearningservices machine-learning-service create --properties "{\\"appInsightsEnabled\\":true,\
+\\"authEnabled\\":true,\\"computeType\\":\\"ACI\\",\\"containerResourceRequirements\\":{\\"cpu\\":1,\\"memoryInGB\\":1}\
+,\\"environmentImageRequest\\":{\\"assets\\":[{\\"id\\":null,\\"mimeType\\":\\"application/x-python\\",\\"unpack\\":fal\
+se,\\"url\\":\\"aml://storage/azureml/score.py\\"}],\\"driverProgram\\":\\"score.py\\",\\"environment\\":{\\"name\\":\\\
+"AzureML-Scikit-learn-0.20.3\\",\\"docker\\":{\\"baseDockerfile\\":null,\\"baseImage\\":\\"mcr.microsoft.com/azureml/ba\
+se:openmpi3.1.2-ubuntu16.04\\",\\"baseImageRegistry\\":{\\"address\\":null,\\"password\\":null,\\"username\\":null}},\\\
+"environmentVariables\\":{\\"EXAMPLE_ENV_VAR\\":\\"EXAMPLE_VALUE\\"},\\"inferencingStackVersion\\":null,\\"python\\":{\
+\\"baseCondaEnvironment\\":null,\\"condaDependencies\\":{\\"name\\":\\"azureml_ae1acbe6e1e6aabbad900b53c491a17c\\",\\"c\
+hannels\\":[\\"conda-forge\\"],\\"dependencies\\":[\\"python=3.6.2\\",{\\"pip\\":[\\"azureml-core==1.0.69\\",\\"azureml\
+-defaults==1.0.69\\",\\"azureml-telemetry==1.0.69\\",\\"azureml-train-restclients-hyperdrive==1.0.69\\",\\"azureml-trai\
+n-core==1.0.69\\",\\"scikit-learn==0.20.3\\",\\"scipy==1.2.1\\",\\"numpy==1.16.2\\",\\"joblib==0.13.2\\"]}]},\\"interpr\
+eterPath\\":\\"python\\",\\"userManagedDependencies\\":false},\\"spark\\":{\\"packages\\":[],\\"precachePackages\\":tru\
+e,\\"repositories\\":[]},\\"version\\":\\"3\\"},\\"models\\":[{\\"name\\":\\"sklearn_regression_model.pkl\\",\\"mimeTyp\
+e\\":\\"application/x-python\\",\\"url\\":\\"aml://storage/azureml/sklearn_regression_model.pkl\\"}]},\\"location\\":\\\
+"eastus2\\"}" --resource-group "testrg123" --service-name "service456" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-service update'] = """
+ type: command
+ short-summary: "Update service. This call will update a service if it exists. This is a nonrecoverable operation. \
+If your intent is to Update a new service, do a GET first to verify that it does not exist yet."
+"""
+
+helps['machinelearningservices machine-learning-service delete'] = """
+ type: command
+ short-summary: "Delete a specific Service.."
+ examples:
+ - name: Delete Service
+ text: |-
+ az machinelearningservices machine-learning-service delete --resource-group "testrg123" --service-name \
+"service123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-service wait'] = """
+ type: command
+ short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices \
+machine-learning-service is met.
+ examples:
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-service is \
+successfully created.
+ text: |-
+ az machinelearningservices machine-learning-service wait --resource-group "testrg123" --service-name \
+"service123" --workspace-name "workspaces123" --created
+ - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-service is \
+successfully updated.
+ text: |-
+ az machinelearningservices machine-learning-service wait --resource-group "testrg123" --service-name \
+"service123" --workspace-name "workspaces123" --updated
+"""
+
+helps['machinelearningservices notebook'] = """
+ type: group
+ short-summary: Manage notebook with machinelearningservices
+"""
+
+helps['machinelearningservices notebook list-key'] = """
+ type: command
+ short-summary: "."
+ examples:
+ - name: List Workspace Keys
+ text: |-
+ az machinelearningservices notebook list-key --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices notebook prepare'] = """
+ type: command
+ short-summary: "."
+ examples:
+ - name: Prepare Notebook
+ text: |-
+ az machinelearningservices notebook prepare --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices storage-account'] = """
+ type: group
+ short-summary: Manage storage account with machinelearningservices
+"""
+
+helps['machinelearningservices storage-account list-key'] = """
+ type: command
+ short-summary: "."
+ examples:
+ - name: List Workspace Keys
+ text: |-
+ az machinelearningservices storage-account list-key --resource-group "testrg123" --workspace-name \
+"workspaces123"
+"""
+
+helps['machinelearningservices workspace-connection'] = """
+ type: group
+ short-summary: Manage workspace connection with machinelearningservices
+"""
+
+helps['machinelearningservices workspace-connection list'] = """
+ type: command
+ short-summary: "List all connections under a AML workspace."
+ examples:
+ - name: ListWorkspaceConnections
+ text: |-
+ az machinelearningservices workspace-connection list --category "ACR" --resource-group \
+"resourceGroup-1" --target "www.facebook.com" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection show'] = """
+ type: command
+ short-summary: "Get the detail of a workspace connection."
+ examples:
+ - name: GetWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection create'] = """
+ type: command
+ short-summary: "Add a new workspace connection."
+ examples:
+ - name: CreateWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection create --connection-name "connection-1" --name \
+"connection-1" --auth-type "PAT" --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection delete'] = """
+ type: command
+ short-summary: "Delete a workspace connection."
+ examples:
+ - name: DeleteWorkspaceConnection
+ text: |-
+ az machinelearningservices workspace-connection delete --connection-name "connection-1" \
+--resource-group "resourceGroup-1" --workspace-name "workspace-1"
+"""
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_params.py b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py
new file mode 100644
index 00000000000..2d50b12a4ef
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py
@@ -0,0 +1,540 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-lines
+# pylint: disable=too-many-statements
+
+from azure.cli.core.commands.parameters import (
+ tags_type,
+ get_three_state_flag,
+ get_enum_type,
+ resource_group_name_type,
+ get_location_type
+)
+from azure.cli.core.commands.validators import (
+ get_default_location_from_resource_group,
+ validate_file_or_dict
+)
+from azext_machinelearningservices.action import (
+ AddSku,
+ AddSharedPrivateLinkResources,
+ AddIdentity,
+ AddKeyVaultProperties,
+ AddValue,
+ AddAdministratorAccount,
+ AddScaleSettings,
+ AddPrivateLinkServiceConnectionState
+)
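`azext_machinelearningservices.action` is not part of this hunk, but the `Usage: --sku name=XX tier=XX` entries in the help text above suggest the usual generated pattern: an `argparse.Action` that folds space-separated `key=value` tokens into a dict. Below is a rough sketch of that pattern; the class name and behaviour are assumptions for illustration, not the actual generated code.

```python
import argparse


class AddKeyValueDict(argparse.Action):
    """Illustrative stand-in for actions like AddSku: folds space-separated
    key=value tokens (e.g. name=Basic tier=Basic) into a single dict."""

    def __call__(self, parser, namespace, values, option_string=None):
        result = {}
        for token in values:
            key, sep, value = token.partition("=")
            if not sep:
                raise argparse.ArgumentError(self, f"expected key=value, got '{token}'")
            result[key] = value
        setattr(namespace, self.dest, result)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--sku", action=AddKeyValueDict, nargs="+")
    print(parser.parse_args(["--sku", "name=Basic", "tier=Basic"]).sku)
    # -> {'name': 'Basic', 'tier': 'Basic'}
```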
+
+
+def load_arguments(self, _):
+
+ with self.argument_context('machinelearningservices workspace list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+
+ with self.argument_context('machinelearningservices workspace show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('description', type=str, help='The description of this workspace.')
+ c.argument('friendly_name', type=str, help='The friendly name for this workspace. This name is mutable')
+ c.argument('key_vault', type=str, help='ARM id of the key vault associated with this workspace. This cannot be '
+ 'changed once the workspace has been created')
+ c.argument('application_insights', type=str, help='ARM id of the application insights associated with this '
+ 'workspace. This cannot be changed once the workspace has been created')
+ c.argument('container_registry', type=str, help='ARM id of the container registry associated with this '
+ 'workspace. This cannot be changed once the workspace has been created')
+ c.argument('storage_account', type=str, help='ARM id of the storage account associated with this workspace. '
+ 'This cannot be changed once the workspace has been created')
+ c.argument('discovery_url', type=str, help='Url for the discovery service to identify regional endpoints for '
+ 'machine learning experimentation services')
+ c.argument('hbi_workspace', arg_type=get_three_state_flag(), help='The flag to signal HBI data in the '
+ 'workspace and reduce diagnostic data collected by the service')
+ c.argument('image_build_compute', type=str, help='The compute name for image build')
+ c.argument('allow_public_access_when_behind_vnet', arg_type=get_three_state_flag(), help='The flag to indicate '
+ 'whether to allow public access when behind VNet.')
+ c.argument('shared_private_link_resources', action=AddSharedPrivateLinkResources, nargs='+', help='The list of '
+ 'shared private link resources in this workspace.')
+ c.argument('primary_user_assigned_identity', type=str, help='The user assigned identity resource id that '
+ 'represents the workspace identity.')
+ c.argument('collections_throughput', type=int, help='The throughput of the collections in cosmosdb database',
+ arg_group='Service Managed Resources Settings Cosmos Db')
+ c.argument('status', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Indicates whether or not the '
+ 'encryption is enabled for the workspace.', arg_group='Encryption')
+ c.argument('identity', action=AddIdentity, nargs='+', help='The identity that will be used to access the key '
+ 'vault for encryption at rest.', arg_group='Encryption')
+ c.argument('key_vault_properties', action=AddKeyVaultProperties, nargs='+', help='Customer Key vault '
+ 'properties.', arg_group='Encryption')
+
+ with self.argument_context('machinelearningservices workspace update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('description', type=str, help='The description of this workspace.')
+ c.argument('friendly_name', type=str, help='The friendly name for this workspace.')
+ c.argument('image_build_compute', type=str, help='The compute name for image build')
+ c.argument('primary_user_assigned_identity', type=str, help='The user assigned identity resource id that '
+ 'represents the workspace identity.')
+ c.argument('collections_throughput', type=int, help='The throughput of the collections in cosmosdb database',
+ arg_group='Service Managed Resources Settings Cosmos Db')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+
+ with self.argument_context('machinelearningservices workspace delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace list-notebook-access-token') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace resync-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+ 'Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices workspace-feature list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices usage list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices virtual-machine-size list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices quota list') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx))
+
+ with self.argument_context('machinelearningservices quota update') as c:
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name')
+ c.argument('value', action=AddValue, nargs='+', help='The list for update quota.')
+ c.argument('quota_update_parameters_location', type=str, help='Region of workspace quota to be updated.',
+ id_part='name')
+
+ with self.argument_context('machinelearningservices machine-learning-compute list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute aks create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('ak_s_compute_location', type=str, help='Location for the underlying compute')
+ c.argument('ak_s_description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('ak_s_resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('ak_s_disable_local_auth', arg_type=get_three_state_flag(), help='Opt-out of local authentication '
+ 'and ensure customers can use only MSI and AAD exclusively for authentication.')
+ c.argument('ak_s_properties', type=validate_file_or_dict, help='AKS properties Expected value: '
+ 'json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute aml-compute create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('disable_local_auth', arg_type=get_three_state_flag(), help='Opt-out of local authentication and '
+ 'ensure customers can use only MSI and AAD exclusively for authentication.')
+ c.argument('aml_compute_properties', type=validate_file_or_dict, help='AML Compute properties Expected value: '
+ 'json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute compute-instance create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('disable_local_auth', arg_type=get_three_state_flag(), help='Opt-out of local authentication and '
+ 'ensure customers can use only MSI and AAD exclusively for authentication.')
+ c.argument('compute_instance_properties', type=validate_file_or_dict, help='Compute Instance properties '
+ 'Expected value: json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute data-factory create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('disable_local_auth', arg_type=get_three_state_flag(), help='Opt-out of local authentication and '
+ 'ensure customers can use only MSI and AAD exclusively for authentication.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute data-lake-analytics create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('disable_local_auth', arg_type=get_three_state_flag(), help='Opt-out of local authentication and '
+ 'ensure customers can use only MSI and AAD exclusively for authentication.')
+ c.argument('data_lake_store_account_name', type=str, help='DataLake Store Account Name')
+
+ with self.argument_context('machinelearningservices machine-learning-compute databricks create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('disable_local_auth', arg_type=get_three_state_flag(), help='Opt-out of local authentication and '
+ 'ensure customers can use only MSI and AAD exclusively for authentication.')
+ c.argument('databricks_access_token', type=str, help='Databricks access token')
+ c.argument('workspace_url', type=str, help='Workspace Url')
+
+ with self.argument_context('machinelearningservices machine-learning-compute hd-insight create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('disable_local_auth', arg_type=get_three_state_flag(), help='Opt-out of local authentication and '
+ 'ensure customers can use only MSI and AAD exclusively for authentication.')
+ c.argument('ssh_port', type=int, help='Port open for ssh connections on the master node of the cluster.')
+ c.argument('address', type=str, help='Public IP address of the master node of the cluster.')
+ c.argument('administrator_account', action=AddAdministratorAccount, nargs='+', help='Admin credentials for '
+ 'master node of the cluster')
+
+ with self.argument_context('machinelearningservices machine-learning-compute synapse-spark create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('disable_local_auth', arg_type=get_three_state_flag(), help='Opt-out of local authentication and '
+ 'ensure customers can use only MSI and AAD exclusively for authentication.')
+ c.argument('synapse_spark_properties', type=validate_file_or_dict, help='SynapseSpark properties Expected value: '
+ 'json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute virtual-machine create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('compute_location', type=str, help='Location for the underlying compute')
+ c.argument('description', type=str, help='The description of the Machine Learning compute.')
+ c.argument('resource_id', type=str, help='ARM resource id of the underlying compute')
+ c.argument('disable_local_auth', arg_type=get_three_state_flag(), help='Opt-out of local authentication and '
+ 'ensure customers can use only MSI and AAD exclusively for authentication.')
+ c.argument('virtual_machine_properties', type=validate_file_or_dict, help='Virtual Machine properties Expected value: '
+ 'json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+ c.argument('scale_settings', action=AddScaleSettings, nargs='+', help='Desired scale settings for the '
+ 'amlCompute.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+ c.argument('underlying_resource_action', arg_type=get_enum_type(['Delete', 'Detach']), help='Delete the '
+ 'underlying compute if \'Delete\', or detach the underlying compute from workspace if \'Detach\'.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute list-node') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.')
+
+ with self.argument_context('machinelearningservices machine-learning-compute restart') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute start') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute stop') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-compute wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name',
+ options_list=['--name', '-n', '--private-endpoint-connection-name'], type=str,
+ help='The name of the private endpoint connection associated with the workspace.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name',
+ options_list=['--name', '-n', '--private-endpoint-connection-name'], type=str,
+ help='The name of the private endpoint connection associated with the workspace.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices private-endpoint-connection put') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('private_endpoint_connection_name',
+ options_list=['--name', '-n', '--private-endpoint-connection-name'], type=str,
+ help='The name of the private endpoint connection associated with the workspace.',
+ id_part='child_name_1')
+ c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False,
+ validator=get_default_location_from_resource_group)
+ c.argument('tags', tags_type)
+ c.argument('sku', action=AddSku, nargs='+', help='The sku of the workspace.')
+ c.argument('type_', options_list=['--type'], arg_type=get_enum_type(['SystemAssigned',
+ 'SystemAssigned,UserAssigned',
+ 'UserAssigned', 'None']), help='The '
+ 'identity type.', arg_group='Identity')
+ c.argument('user_assigned_identities', type=validate_file_or_dict, help='The user assigned identities '
+ 'associated with the resource. Expected value: json-string/@json-file.', arg_group='Identity')
+ c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='+',
+ help='A collection of information about the state of the connection between service consumer and '
+ 'provider.')
+
+ with self.argument_context('machinelearningservices private-link-resource list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices machine-learning-service list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('skip', type=str, help='Continuation token for pagination.')
+ c.argument('model_id', type=str, help='The Model Id.')
+ c.argument('model_name', type=str, help='The Model name.')
+ c.argument('tag', type=str, help='The object tag.')
+ c.argument('tags', tags_type)
+ c.argument('properties', type=str, help='A set of properties with which to filter the returned services. It is '
+ 'a comma separated string of properties key and/or properties key=value Example: '
+ 'propKey1,propKey2,propKey3=value3 .')
+ c.argument('run_id', type=str, help='runId for model associated with service.')
+ c.argument('expand', arg_type=get_three_state_flag(), help='Set to True to include Model details.')
+ c.argument('orderby', arg_type=get_enum_type(['CreatedAtDesc', 'CreatedAtAsc', 'UpdatedAtDesc',
+ 'UpdatedAtAsc']), help='The option to order the response.')
+
+ with self.argument_context('machinelearningservices machine-learning-service show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('service_name', type=str, help='Name of the Azure Machine Learning service.',
+ id_part='child_name_1')
+ c.argument('expand', arg_type=get_three_state_flag(), help='Set to True to include Model details.')
+
+ with self.argument_context('machinelearningservices machine-learning-service create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('service_name', type=str, help='Name of the Azure Machine Learning service.')
+ c.argument('properties', type=validate_file_or_dict, help='The payload that is used to create or update the '
+ 'Service. Expected value: json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-service update') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('service_name', type=str, help='Name of the Azure Machine Learning service.',
+ id_part='child_name_1')
+ c.argument('properties', type=validate_file_or_dict, help='The payload that is used to create or update the '
+ 'Service. Expected value: json-string/@json-file.')
+
+ with self.argument_context('machinelearningservices machine-learning-service delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('service_name', type=str, help='Name of the Azure Machine Learning service.',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices machine-learning-service wait') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('service_name', type=str, help='Name of the Azure Machine Learning service.',
+ id_part='child_name_1')
+ c.argument('expand', arg_type=get_three_state_flag(), help='Set to True to include Model details.')
+
+ with self.argument_context('machinelearningservices notebook list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices notebook prepare') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+
+ with self.argument_context('machinelearningservices storage-account list-key') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+
+ with self.argument_context('machinelearningservices workspace-connection list') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('target', type=str, help='Target of the workspace connection.')
+ c.argument('category', type=str, help='Category of the workspace connection.')
+
+ with self.argument_context('machinelearningservices workspace-connection show') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection',
+ id_part='child_name_1')
+
+ with self.argument_context('machinelearningservices workspace-connection create') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection')
+ c.argument('name', type=str, help='Friendly name of the workspace connection')
+ c.argument('category', type=str, help='Category of the workspace connection.')
+ c.argument('target', type=str, help='Target of the workspace connection.')
+ c.argument('auth_type', type=str, help='Authorization type of the workspace connection.')
+ c.argument('value', type=str, help='Value details of the workspace connection.')
+
+ with self.argument_context('machinelearningservices workspace-connection delete') as c:
+ c.argument('resource_group_name', resource_group_name_type)
+ c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name')
+ c.argument('connection_name', type=str, help='Friendly name of the workspace connection',
+ id_part='child_name_1')
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py
new file mode 100644
index 00000000000..b33a44c1ebf
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py
@@ -0,0 +1,9 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/action.py b/src/machinelearningservices/azext_machinelearningservices/generated/action.py
new file mode 100644
index 00000000000..de17d2cf898
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/action.py
@@ -0,0 +1,250 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=protected-access
+
+import argparse
+from collections import defaultdict
+from knack.util import CLIError
+
+
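+# Each Add* action below converts the space-separated KEY=VALUE tokens collected with nargs='+'
+# into the dictionary shape expected by the corresponding request property, and rejects any key
+# the service does not recognize.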
+class AddSku(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.sku = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'name':
+ d['name'] = v[0]
+ elif kl == 'tier':
+ d['tier'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter sku. All possible keys are: name, tier'.
+ format(k))
+ return d
+
+
+class AddSharedPrivateLinkResources(argparse._AppendAction):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ super(AddSharedPrivateLinkResources, self).__call__(parser, namespace, action, option_string)
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'name':
+ d['name'] = v[0]
+ elif kl == 'private-link-resource-id':
+ d['private_link_resource_id'] = v[0]
+ elif kl == 'group-id':
+ d['group_id'] = v[0]
+ elif kl == 'request-message':
+ d['request_message'] = v[0]
+ elif kl == 'status':
+ d['status'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter shared_private_link_resources. All '
+ 'possible keys are: name, private-link-resource-id, group-id, request-message, status'.
+ format(k))
+ return d
+
+
+class AddIdentity(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.identity = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'user-assigned-identity':
+ d['user_assigned_identity'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter identity. All possible keys are: '
+ 'user-assigned-identity'.format(k))
+ return d
+
+
+class AddKeyVaultProperties(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.key_vault_properties = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'key-vault-arm-id':
+ d['key_vault_arm_id'] = v[0]
+ elif kl == 'key-identifier':
+ d['key_identifier'] = v[0]
+ elif kl == 'identity-client-id':
+ d['identity_client_id'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter key_vault_properties. All possible keys '
+ 'are: key-vault-arm-id, key-identifier, identity-client-id'.format(k))
+ return d
+
+
+class AddValue(argparse._AppendAction):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ super(AddValue, self).__call__(parser, namespace, action, option_string)
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'id':
+ d['id'] = v[0]
+ elif kl == 'type':
+ d['type'] = v[0]
+ elif kl == 'limit':
+ d['limit'] = v[0]
+ elif kl == 'unit':
+ d['unit'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter value. All possible keys are: id, type, '
+ 'limit, unit'.format(k))
+ return d
+
+
+class AddAdministratorAccount(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.administrator_account = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'username':
+ d['username'] = v[0]
+ elif kl == 'password':
+ d['password'] = v[0]
+ elif kl == 'public-key-data':
+ d['public_key_data'] = v[0]
+ elif kl == 'private-key-data':
+ d['private_key_data'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter administrator_account. All possible keys '
+ 'are: username, password, public-key-data, private-key-data'.format(k))
+ return d
+
+
+class AddScaleSettings(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.scale_settings = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
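+ # min-node-count defaults to 0 when the caller does not supply it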
+ d['min_node_count'] = 0
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'max-node-count':
+ d['max_node_count'] = v[0]
+ elif kl == 'min-node-count':
+ d['min_node_count'] = v[0]
+ elif kl == 'node-idle-time-before-scale-down':
+ d['node_idle_time_before_scale_down'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter scale_settings. All possible keys are: '
+ 'max-node-count, min-node-count, node-idle-time-before-scale-down'.format(k))
+ return d
+
+
+class AddPrivateLinkServiceConnectionState(argparse.Action):
+ def __call__(self, parser, namespace, values, option_string=None):
+ action = self.get_action(values, option_string)
+ namespace.private_link_service_connection_state = action
+
+ def get_action(self, values, option_string): # pylint: disable=no-self-use
+ try:
+ properties = defaultdict(list)
+ for (k, v) in (x.split('=', 1) for x in values):
+ properties[k].append(v)
+ properties = dict(properties)
+ except ValueError:
+ raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string))
+ d = {}
+ for k in properties:
+ kl = k.lower()
+ v = properties[k]
+ if kl == 'status':
+ d['status'] = v[0]
+ elif kl == 'description':
+ d['description'] = v[0]
+ elif kl == 'actions-required':
+ d['actions_required'] = v[0]
+ else:
+ raise CLIError('Unsupported Key {} is provided for parameter private_link_service_connection_state. '
+ 'All possible keys are: status, description, actions-required'.format(k))
+ return d
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/commands.py b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py
new file mode 100644
index 00000000000..d9ec03ee369
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py
@@ -0,0 +1,191 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=too-many-statements
+# pylint: disable=too-many-locals
+
+from azure.cli.core.commands import CliCommandType
+
+
+def load_command_table(self, _):
+
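+ # Each command group below binds a CLI group to its AutoRest-generated operations class through
+ # CliCommandType and routes the commands to the implementations in generated/custom.py.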
+ from azext_machinelearningservices.generated._client_factory import cf_workspace
+ machinelearningservices_workspace = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspaces_ope'
+ 'rations#WorkspacesOperations.{}',
+ client_factory=cf_workspace)
+ with self.command_group('machinelearningservices workspace', machinelearningservices_workspace,
+ client_factory=cf_workspace) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_list')
+ g.custom_show_command('show', 'machinelearningservices_workspace_show')
+ g.custom_command('create', 'machinelearningservices_workspace_create', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_workspace_update')
+ g.custom_command('delete', 'machinelearningservices_workspace_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_command('list-key', 'machinelearningservices_workspace_list_key')
+ g.custom_command('list-notebook-access-token', 'machinelearningservices_workspace_list_notebook_access_token')
+ g.custom_command('resync-key', 'machinelearningservices_workspace_resync_key', supports_no_wait=True)
+ g.custom_wait_command('wait', 'machinelearningservices_workspace_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace_feature
+ machinelearningservices_workspace_feature = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_feat'
+ 'ures_operations#WorkspaceFeaturesOperations.{}',
+ client_factory=cf_workspace_feature)
+ with self.command_group('machinelearningservices workspace-feature', machinelearningservices_workspace_feature,
+ client_factory=cf_workspace_feature) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_feature_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_usage
+ machinelearningservices_usage = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._usages_operati'
+ 'ons#UsagesOperations.{}',
+ client_factory=cf_usage)
+ with self.command_group('machinelearningservices usage', machinelearningservices_usage,
+ client_factory=cf_usage) as g:
+ g.custom_command('list', 'machinelearningservices_usage_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_virtual_machine_size
+ machinelearningservices_virtual_machine_size = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._virtual_machin'
+ 'e_sizes_operations#VirtualMachineSizesOperations.{}',
+ client_factory=cf_virtual_machine_size)
+ with self.command_group('machinelearningservices virtual-machine-size',
+ machinelearningservices_virtual_machine_size,
+ client_factory=cf_virtual_machine_size) as g:
+ g.custom_command('list', 'machinelearningservices_virtual_machine_size_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_quota
+ machinelearningservices_quota = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._quotas_operati'
+ 'ons#QuotasOperations.{}',
+ client_factory=cf_quota)
+ with self.command_group('machinelearningservices quota', machinelearningservices_quota,
+ client_factory=cf_quota) as g:
+ g.custom_command('list', 'machinelearningservices_quota_list')
+ g.custom_command('update', 'machinelearningservices_quota_update')
+
+ from azext_machinelearningservices.generated._client_factory import cf_machine_learning_compute
+ machinelearningservices_machine_learning_compute = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._machine_learni'
+ 'ng_compute_operations#MachineLearningComputeOperations.{}',
+ client_factory=cf_machine_learning_compute)
+ with self.command_group('machinelearningservices machine-learning-compute',
+ machinelearningservices_machine_learning_compute,
+ client_factory=cf_machine_learning_compute) as g:
+ g.custom_command('list', 'machinelearningservices_machine_learning_compute_list')
+ g.custom_show_command('show', 'machinelearningservices_machine_learning_compute_show')
+ g.custom_command('aks create', 'machinelearningservices_machine_learning_compute_aks_create',
+ supports_no_wait=True)
+ g.custom_command('aml-compute create', 'machinelearningservices_machine_learning_compute_aml_compute_create',
+ supports_no_wait=True)
+ g.custom_command('compute-instance create', 'machinelearningservices_machine_learning_compute_compute_instance_'
+ 'create', supports_no_wait=True)
+ g.custom_command('data-factory create', 'machinelearningservices_machine_learning_compute_data_factory_create',
+ supports_no_wait=True)
+ g.custom_command('data-lake-analytics create', 'machinelearningservices_machine_learning_compute_data_lake_anal'
+ 'ytics_create', supports_no_wait=True)
+ g.custom_command('databricks create', 'machinelearningservices_machine_learning_compute_databricks_create',
+ supports_no_wait=True)
+ g.custom_command('hd-insight create', 'machinelearningservices_machine_learning_compute_hd_insight_create',
+ supports_no_wait=True)
+ g.custom_command('synapse-spark create',
+ 'machinelearningservices_machine_learning_compute_synapse_spark_create', supports_no_wait=True)
+ g.custom_command('virtual-machine create', 'machinelearningservices_machine_learning_compute_virtual_machine_cr'
+ 'eate', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_machine_learning_compute_update', supports_no_wait=True)
+ g.custom_command('delete', 'machinelearningservices_machine_learning_compute_delete', supports_no_wait=True,
+ confirmation=True)
+ g.custom_command('list-key', 'machinelearningservices_machine_learning_compute_list_key')
+ g.custom_command('list-node', 'machinelearningservices_machine_learning_compute_list_node')
+ g.custom_command('restart', 'machinelearningservices_machine_learning_compute_restart')
+ g.custom_command('start', 'machinelearningservices_machine_learning_compute_start', supports_no_wait=True)
+ g.custom_command('stop', 'machinelearningservices_machine_learning_compute_stop', supports_no_wait=True)
+ g.custom_wait_command('wait', 'machinelearningservices_machine_learning_compute_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace
+ machinelearningservices_workspace = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_oper'
+ 'ations#WorkspaceOperations.{}',
+ client_factory=cf_workspace)
+ with self.command_group('machinelearningservices workspace', machinelearningservices_workspace,
+ client_factory=cf_workspace) as g:
+ g.custom_command('list-sku', 'machinelearningservices_workspace_list_sku')
+
+ from azext_machinelearningservices.generated._client_factory import cf_private_endpoint_connection
+ machinelearningservices_private_endpoint_connection = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_endpoi'
+ 'nt_connections_operations#PrivateEndpointConnectionsOperations.{}',
+ client_factory=cf_private_endpoint_connection)
+ with self.command_group('machinelearningservices private-endpoint-connection',
+ machinelearningservices_private_endpoint_connection,
+ client_factory=cf_private_endpoint_connection) as g:
+ g.custom_show_command('show', 'machinelearningservices_private_endpoint_connection_show')
+ g.custom_command('delete', 'machinelearningservices_private_endpoint_connection_delete', confirmation=True)
+ g.custom_command('put', 'machinelearningservices_private_endpoint_connection_put')
+
+ from azext_machinelearningservices.generated._client_factory import cf_private_link_resource
+ machinelearningservices_private_link_resource = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_link_r'
+ 'esources_operations#PrivateLinkResourcesOperations.{}',
+ client_factory=cf_private_link_resource)
+ with self.command_group('machinelearningservices private-link-resource',
+ machinelearningservices_private_link_resource,
+ client_factory=cf_private_link_resource) as g:
+ g.custom_command('list', 'machinelearningservices_private_link_resource_list')
+
+ from azext_machinelearningservices.generated._client_factory import cf_machine_learning_service
+ machinelearningservices_machine_learning_service = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._machine_learni'
+ 'ng_service_operations#MachineLearningServiceOperations.{}',
+ client_factory=cf_machine_learning_service)
+ with self.command_group('machinelearningservices machine-learning-service',
+ machinelearningservices_machine_learning_service,
+ client_factory=cf_machine_learning_service) as g:
+ g.custom_command('list', 'machinelearningservices_machine_learning_service_list')
+ g.custom_show_command('show', 'machinelearningservices_machine_learning_service_show')
+ g.custom_command('create', 'machinelearningservices_machine_learning_service_create', supports_no_wait=True)
+ g.custom_command('update', 'machinelearningservices_machine_learning_service_update', supports_no_wait=True)
+ g.custom_command('delete', 'machinelearningservices_machine_learning_service_delete', confirmation=True)
+ g.custom_wait_command('wait', 'machinelearningservices_machine_learning_service_show')
+
+ from azext_machinelearningservices.generated._client_factory import cf_notebook
+ machinelearningservices_notebook = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._notebooks_oper'
+ 'ations#NotebooksOperations.{}',
+ client_factory=cf_notebook)
+ with self.command_group('machinelearningservices notebook', machinelearningservices_notebook,
+ client_factory=cf_notebook) as g:
+ g.custom_command('list-key', 'machinelearningservices_notebook_list_key')
+ g.custom_command('prepare', 'machinelearningservices_notebook_prepare')
+
+ from azext_machinelearningservices.generated._client_factory import cf_storage_account
+ machinelearningservices_storage_account = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._storage_accoun'
+ 't_operations#StorageAccountOperations.{}',
+ client_factory=cf_storage_account)
+ with self.command_group('machinelearningservices storage-account', machinelearningservices_storage_account,
+ client_factory=cf_storage_account) as g:
+ g.custom_command('list-key', 'machinelearningservices_storage_account_list_key')
+
+ from azext_machinelearningservices.generated._client_factory import cf_workspace_connection
+ machinelearningservices_workspace_connection = CliCommandType(
+ operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_conn'
+ 'ections_operations#WorkspaceConnectionsOperations.{}',
+ client_factory=cf_workspace_connection)
+ with self.command_group('machinelearningservices workspace-connection',
+ machinelearningservices_workspace_connection,
+ client_factory=cf_workspace_connection) as g:
+ g.custom_command('list', 'machinelearningservices_workspace_connection_list')
+ g.custom_show_command('show', 'machinelearningservices_workspace_connection_show')
+ g.custom_command('create', 'machinelearningservices_workspace_connection_create')
+ g.custom_command('delete', 'machinelearningservices_workspace_connection_delete', confirmation=True)
+
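+ # Registering the bare 'machinelearningservices' group with is_experimental=True flags every
+ # command in the extension as experimental.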
+ with self.command_group('machinelearningservices', is_experimental=True):
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/custom.py b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py
new file mode 100644
index 00000000000..5df9975a374
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py
@@ -0,0 +1,826 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=line-too-long
+# pylint: disable=too-many-lines
+
+from azure.cli.core.util import sdk_no_wait
+
+
+def machinelearningservices_workspace_list(client,
+ resource_group_name=None,
+ skip=None):
+ if resource_group_name:
+ return client.list_by_resource_group(resource_group_name=resource_group_name,
+ skip=skip)
+ return client.list_by_subscription(skip=skip)
+
+
+def machinelearningservices_workspace_show(client,
+ resource_group_name,
+ workspace_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_create(client,
+ resource_group_name,
+ workspace_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ description=None,
+ friendly_name=None,
+ key_vault=None,
+ application_insights=None,
+ container_registry=None,
+ storage_account=None,
+ discovery_url=None,
+ hbi_workspace=None,
+ image_build_compute=None,
+ allow_public_access_when_behind_vnet=None,
+ shared_private_link_resources=None,
+ primary_user_assigned_identity=None,
+ collections_throughput=None,
+ status=None,
+ identity=None,
+ key_vault_properties=None,
+ no_wait=False):
+ if hbi_workspace is None:
+ hbi_workspace = False
+ if allow_public_access_when_behind_vnet is None:
+ allow_public_access_when_behind_vnet = False
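+ # Assemble the workspace request body: flattened CLI arguments are regrouped into the nested
+ # identity, cosmos_db and encryption objects, and sdk_no_wait honors --no-wait for the
+ # long-running create.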
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['description'] = description
+ parameters['friendly_name'] = friendly_name
+ parameters['key_vault'] = key_vault
+ parameters['application_insights'] = application_insights
+ parameters['container_registry'] = container_registry
+ parameters['storage_account'] = storage_account
+ parameters['discovery_url'] = discovery_url
+ parameters['hbi_workspace'] = hbi_workspace
+ parameters['image_build_compute'] = image_build_compute
+ parameters['allow_public_access_when_behind_vnet'] = allow_public_access_when_behind_vnet
+ parameters['shared_private_link_resources'] = shared_private_link_resources
+ parameters['primary_user_assigned_identity'] = primary_user_assigned_identity
+ parameters['cosmos_db'] = {}
+ parameters['cosmos_db']['collections_throughput'] = collections_throughput
+ parameters['encryption'] = {}
+ parameters['encryption']['status'] = status
+ parameters['encryption']['identity'] = identity
+ parameters['encryption']['key_vault_properties'] = key_vault_properties
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_update(client,
+ resource_group_name,
+ workspace_name,
+ tags=None,
+ sku=None,
+ description=None,
+ friendly_name=None,
+ image_build_compute=None,
+ primary_user_assigned_identity=None,
+ collections_throughput=None,
+ type_=None,
+ user_assigned_identities=None):
+ parameters = {}
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['description'] = description
+ parameters['friendly_name'] = friendly_name
+ parameters['image_build_compute'] = image_build_compute
+ parameters['primary_user_assigned_identity'] = primary_user_assigned_identity
+ parameters['cosmos_db'] = {}
+ parameters['cosmos_db']['collections_throughput'] = collections_throughput
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ return client.update(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_delete(client,
+ resource_group_name,
+ workspace_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_list_key(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_list_notebook_access_token(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_notebook_access_token(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_resync_key(client,
+ resource_group_name,
+ workspace_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_resync_keys,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_feature_list(client,
+ resource_group_name,
+ workspace_name):
+ return client.list(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_usage_list(client,
+ location):
+ return client.list(location=location)
+
+
+def machinelearningservices_virtual_machine_size_list(client,
+ location):
+ return client.list(location=location)
+
+
+def machinelearningservices_quota_list(client,
+ location):
+ return client.list(location=location)
+
+
+def machinelearningservices_quota_update(client,
+ location,
+ value=None,
+ quota_update_parameters_location=None):
+ parameters = {}
+ parameters['value'] = value
+ parameters['location'] = quota_update_parameters_location
+ return client.update(location=location,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_list(client,
+ resource_group_name,
+ workspace_name,
+ skip=None):
+ return client.list_by_workspace(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ skip=skip)
+
+
+def machinelearningservices_machine_learning_compute_show(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
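+# The compute create helpers below share one request shape: 'compute_type' is the discriminator
+# that selects the compute flavor (Aks, AmlCompute, ComputeInstance, ...), and flavor-specific
+# settings are nested under properties['properties'].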
+def machinelearningservices_machine_learning_compute_aks_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ ak_s_compute_location=None,
+ ak_s_description=None,
+ ak_s_resource_id=None,
+ ak_s_disable_local_auth=None,
+ ak_s_properties=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'Aks'
+ parameters['properties']['compute_location'] = ak_s_compute_location
+ parameters['properties']['description'] = ak_s_description
+ parameters['properties']['resource_id'] = ak_s_resource_id
+ parameters['properties']['disable_local_auth'] = ak_s_disable_local_auth
+ parameters['properties']['properties'] = ak_s_properties
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_aml_compute_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ disable_local_auth=None,
+ aml_compute_properties=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'AmlCompute'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['disable_local_auth'] = disable_local_auth
+ parameters['properties']['properties'] = aml_compute_properties
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_compute_instance_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ disable_local_auth=None,
+ compute_instance_properties=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'ComputeInstance'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['disable_local_auth'] = disable_local_auth
+ parameters['properties']['properties'] = compute_instance_properties
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_data_factory_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ disable_local_auth=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'DataFactory'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['disable_local_auth'] = disable_local_auth
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_data_lake_analytics_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ disable_local_auth=None,
+ data_lake_store_account_name=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'DataLakeAnalytics'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['disable_local_auth'] = disable_local_auth
+ parameters['properties']['properties'] = {}
+ parameters['properties']['properties']['data_lake_store_account_name'] = data_lake_store_account_name
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_databricks_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ disable_local_auth=None,
+ databricks_access_token=None,
+ workspace_url=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'Databricks'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['disable_local_auth'] = disable_local_auth
+ parameters['properties']['properties'] = {}
+ parameters['properties']['properties']['databricks_access_token'] = databricks_access_token
+ parameters['properties']['properties']['workspace_url'] = workspace_url
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_hd_insight_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ disable_local_auth=None,
+ ssh_port=None,
+ address=None,
+ administrator_account=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'HdInsight'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['disable_local_auth'] = disable_local_auth
+ parameters['properties']['properties'] = {}
+ parameters['properties']['properties']['ssh_port'] = ssh_port
+ parameters['properties']['properties']['address'] = address
+ parameters['properties']['properties']['administrator_account'] = administrator_account
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_synapse_spark_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ disable_local_auth=None,
+ synapse_spark_properties=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'SynapseSpark'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['disable_local_auth'] = disable_local_auth
+ parameters['properties']['properties'] = synapse_spark_properties
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_virtual_machine_create(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ compute_location=None,
+ description=None,
+ resource_id=None,
+ disable_local_auth=None,
+ virtual_machine_properties=None,
+ no_wait=False):
+ parameters = {}
+ parameters['location'] = location
+ parameters['tags'] = tags
+ parameters['sku'] = sku
+ parameters['identity'] = {}
+ parameters['identity']['type'] = type_
+ parameters['identity']['user_assigned_identities'] = user_assigned_identities
+ parameters['properties'] = {}
+ parameters['properties']['compute_type'] = 'VirtualMachine'
+ parameters['properties']['compute_location'] = compute_location
+ parameters['properties']['description'] = description
+ parameters['properties']['resource_id'] = resource_id
+ parameters['properties']['disable_local_auth'] = disable_local_auth
+ parameters['properties']['properties'] = virtual_machine_properties
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_update(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ scale_settings=None,
+ no_wait=False):
+ parameters = {}
+ parameters['scale_settings'] = scale_settings
+ return sdk_no_wait(no_wait,
+ client.begin_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters)
+
+
+def machinelearningservices_machine_learning_compute_delete(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ underlying_resource_action,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_delete,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ underlying_resource_action=underlying_resource_action)
+
+
+def machinelearningservices_machine_learning_compute_list_key(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.list_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_list_node(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.list_nodes(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_restart(client,
+ resource_group_name,
+ workspace_name,
+ compute_name):
+ return client.restart(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_start(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_start,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_stop(client,
+ resource_group_name,
+ workspace_name,
+ compute_name,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_stop,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name)
+
+
+def machinelearningservices_workspace_list_sku(client):
+ return client.list_skus()
+
+
+def machinelearningservices_private_endpoint_connection_show(client,
+ resource_group_name,
+ workspace_name,
+ private_endpoint_connection_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name)
+
+
+def machinelearningservices_private_endpoint_connection_delete(client,
+ resource_group_name,
+ workspace_name,
+ private_endpoint_connection_name):
+ return client.delete(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name)
+
+
+def machinelearningservices_private_endpoint_connection_put(client,
+ resource_group_name,
+ workspace_name,
+ private_endpoint_connection_name,
+ location=None,
+ tags=None,
+ sku=None,
+ type_=None,
+ user_assigned_identities=None,
+ private_link_service_connection_state=None):
+ properties = {}
+ properties['location'] = location
+ properties['tags'] = tags
+ properties['sku'] = sku
+ properties['identity'] = {}
+ properties['identity']['type'] = type_
+ properties['identity']['user_assigned_identities'] = user_assigned_identities
+ properties['private_link_service_connection_state'] = private_link_service_connection_state
+ return client.put(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ private_endpoint_connection_name=private_endpoint_connection_name,
+ properties=properties)
+
+
+def machinelearningservices_private_link_resource_list(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_by_workspace(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_machine_learning_service_list(client,
+ resource_group_name,
+ workspace_name,
+ skip=None,
+ model_id=None,
+ model_name=None,
+ tag=None,
+ tags=None,
+ properties=None,
+ run_id=None,
+ expand=None,
+ orderby=None):
+ if orderby is None:
+ orderby = "UpdatedAtDesc"
+ return client.list_by_workspace(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ skip=skip,
+ model_id=model_id,
+ model_name=model_name,
+ tag=tag,
+ tags=tags,
+ properties=properties,
+ run_id=run_id,
+ expand=expand,
+ orderby=orderby)
+
+
+def machinelearningservices_machine_learning_service_show(client,
+ resource_group_name,
+ workspace_name,
+ service_name,
+ expand=None):
+ if expand is None:
+ expand = False
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ service_name=service_name,
+ expand=expand)
+
+
+def machinelearningservices_machine_learning_service_create(client,
+ resource_group_name,
+ workspace_name,
+ service_name,
+ properties,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ service_name=service_name,
+ properties=properties)
+
+
+def machinelearningservices_machine_learning_service_update(client,
+ resource_group_name,
+ workspace_name,
+ service_name,
+ properties,
+ no_wait=False):
+ return sdk_no_wait(no_wait,
+ client.begin_create_or_update,
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ service_name=service_name,
+ properties=properties)
+
+
+def machinelearningservices_machine_learning_service_delete(client,
+ resource_group_name,
+ workspace_name,
+ service_name):
+ return client.delete(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ service_name=service_name)
+
+
+def machinelearningservices_notebook_list_key(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_notebook_prepare(client,
+ resource_group_name,
+ workspace_name):
+ return client.begin_prepare(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_storage_account_list_key(client,
+ resource_group_name,
+ workspace_name):
+ return client.list_keys(resource_group_name=resource_group_name,
+ workspace_name=workspace_name)
+
+
+def machinelearningservices_workspace_connection_list(client,
+ resource_group_name,
+ workspace_name,
+ target=None,
+ category=None):
+ return client.list(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ target=target,
+ category=category)
+
+
+def machinelearningservices_workspace_connection_show(client,
+ resource_group_name,
+ workspace_name,
+ connection_name):
+ return client.get(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ connection_name=connection_name)
+
+
+def machinelearningservices_workspace_connection_create(client,
+ resource_group_name,
+ workspace_name,
+ connection_name,
+ name=None,
+ category=None,
+ target=None,
+ auth_type=None,
+ value=None):
+ parameters = {}
+ parameters['name'] = name
+ parameters['category'] = category
+ parameters['target'] = target
+ parameters['auth_type'] = auth_type
+ parameters['value'] = value
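+    # value_format is hard-coded to "JSON" by this generated layer.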
+ parameters['value_format'] = "JSON"
+ return client.create(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ connection_name=connection_name,
+ parameters=parameters)
+
+
+def machinelearningservices_workspace_connection_delete(client,
+ resource_group_name,
+ workspace_name,
+ connection_name):
+ return client.delete(resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ connection_name=connection_name)
diff --git a/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py
new file mode 100644
index 00000000000..70488e93851
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py
@@ -0,0 +1,116 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+import inspect
+import logging
+import os
+import sys
+import traceback
+import datetime as dt
+
+from azure.core.exceptions import AzureError
+from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError
+
+
+logger = logging.getLogger('azure.cli.testsdk')
+logger.addHandler(logging.StreamHandler())
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+exceptions = []
+test_map = dict()
+SUCCESSED = "successed"
+FAILED = "failed"
+
+
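+# try_manual swaps in a hand-written implementation from the sibling ``manual``
+# package when one exists for the decorated step function (or test class);
+# otherwise the generated code runs unchanged. Each step's outcome is recorded
+# in test_map so calc_coverage() below can emit a per-step report.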
+def try_manual(func):
+ def import_manual_function(origin_func):
+ from importlib import import_module
+ decorated_path = inspect.getfile(origin_func).lower()
+ module_path = __path__[0].lower()
+ if not decorated_path.startswith(module_path):
+ raise Exception("Decorator can only be used in submodules!")
+ manual_path = os.path.join(
+ decorated_path[module_path.rfind(os.path.sep) + 1:])
+ manual_file_path, manual_file_name = os.path.split(manual_path)
+ module_name, _ = os.path.splitext(manual_file_name)
+ manual_module = "..manual." + \
+ ".".join(manual_file_path.split(os.path.sep) + [module_name, ])
+ return getattr(import_module(manual_module, package=__name__), origin_func.__name__)
+
+ def get_func_to_call():
+ func_to_call = func
+ try:
+ func_to_call = import_manual_function(func)
+ logger.info("Found manual override for %s(...)", func.__name__)
+ except (ImportError, AttributeError):
+ pass
+ return func_to_call
+
+ def wrapper(*args, **kwargs):
+ func_to_call = get_func_to_call()
+ logger.info("running %s()...", func.__name__)
+ try:
+ test_map[func.__name__] = dict()
+ test_map[func.__name__]["result"] = SUCCESSED
+ test_map[func.__name__]["error_message"] = ""
+ test_map[func.__name__]["error_stack"] = ""
+ test_map[func.__name__]["error_normalized"] = ""
+ test_map[func.__name__]["start_dt"] = dt.datetime.utcnow()
+ ret = func_to_call(*args, **kwargs)
+ except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit,
+ JMESPathCheckAssertionError) as e:
+ use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE")
+ if use_exception_cache is None or use_exception_cache.lower() != "true":
+ raise
+ test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+ test_map[func.__name__]["result"] = FAILED
+ test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500]
+ test_map[func.__name__]["error_stack"] = traceback.format_exc().replace(
+ "\r\n", " ").replace("\n", " ")[:500]
+ logger.info("--------------------------------------")
+ logger.info("step exception: %s", e)
+ logger.error("--------------------------------------")
+ logger.error("step exception in %s: %s", func.__name__, e)
+ logger.info(traceback.format_exc())
+ exceptions.append((func.__name__, sys.exc_info()))
+ else:
+ test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+ return ret
+
+ if inspect.isclass(func):
+ return get_func_to_call()
+ return wrapper
+
+
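+# Write <test module>_coverage.md: one markdown table row per recorded step_*
+# function plus a covered/total summary line.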
+def calc_coverage(filename):
+ filename = filename.split(".")[0]
+ coverage_name = filename + "_coverage.md"
+ with open(coverage_name, "w") as f:
+ f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n")
+ total = len(test_map)
+ covered = 0
+ for k, v in test_map.items():
+ if not k.startswith("step_"):
+ total -= 1
+ continue
+ if v["result"] == SUCCESSED:
+ covered += 1
+ f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|"
+ "{end_dt}|\n".format(step_name=k, **v))
+ f.write("Coverage: {}/{}\n".format(covered, total))
+ print("Create coverage\n", file=sys.stderr)
+
+
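+# Re-raise the first exception cached by try_manual (only populated when
+# TEST_EXCEPTION_CACHE=true defers failures), appending a short summary of any
+# exceptions hit in later steps.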
+def raise_if():
+ if exceptions:
+ if len(exceptions) <= 1:
+ raise exceptions[0][1][1]
+ message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1]))
+ message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]])
+ raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2])
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py
new file mode 100644
index 00000000000..2fff640599d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/example_steps.py
@@ -0,0 +1,585 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+
+from .. import try_manual
+
+
+# EXAMPLE: /Workspaces/put/Create Workspace
+@try_manual
+def step_workspace_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace create '
+ '--identity type="SystemAssigned,UserAssigned" userAssignedIdentities={{"/subscriptions/00000000-1111-2222'
+ '-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.ManagedIdentity/userAssignedIdentiti'
+ 'es/testuai":{{}}}} '
+ '--location "eastus2euap" '
+ '--description "test description" '
+ '--application-insights "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.insights'
+ '/components/testinsights" '
+ '--container-registry "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.ContainerR'
+ 'egistry/registries/testRegistry" '
+ '--identity user-assigned-identity="/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microso'
+ 'ft.ManagedIdentity/userAssignedIdentities/testuai" '
+ '--key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/'
+ 'aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/{subscription_id}/resourceGroups/{rg}/'
+ 'providers/Microsoft.KeyVault/vaults/testkv" '
+ '--status "Enabled" '
+ '--friendly-name "HelloName" '
+ '--hbi-workspace false '
+ '--key-vault "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vaults/tes'
+ 'tkv" '
+ '--shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/{subscript'
+ 'ion_id}/resourceGroups/{rg}/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkRes'
+ 'ources/{myPrivateLinkResource}" group-id="{myPrivateLinkResource}" request-message="Please approve" '
+ 'status="Approved" '
+ '--storage-account "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storage/sto'
+ 'rageAccounts/{sa}" '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=[])
+ test.cmd('az machinelearningservices workspace wait --created '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspace
+@try_manual
+def step_workspace_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace show '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspaces by Resource Group
+@try_manual
+def step_workspace_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list '
+ '--resource-group "{rg}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/get/Get Workspaces by subscription
+@try_manual
+def step_workspace_list2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list '
+ '-g ""',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/patch/Update Workspace
+@try_manual
+def step_workspace_update(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace update '
+ '--description "new description" '
+ '--friendly-name "New friendly name" '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/List Workspace Keys
+@try_manual
+def step_workspace_list_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list-key '
+ '--resource-group "{rg_3}" '
+ '--name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/post/Resync Workspace Keys
+@try_manual
+def step_workspace_resync_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace resync-key '
+ '--resource-group "{rg_3}" '
+ '--name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a AML Compute
+@try_manual
+def step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute aks create '
+ '--compute-name "compute123" '
+ '--location "eastus" '
+ '--ak-s-properties "{{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\"'
+ ',\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{{\\"maxNodeCount\\":1,\\"minNo'
+ 'deCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"}},\\"virtualMachineImage\\":{{\\"id\\":\\"/subs'
+ 'criptions/{subscription_id}/resourceGroups/{rg_4}/providers/Microsoft.Compute/galleries/myImageGallery/im'
+ 'ages/myImageDefinition/versions/0.0.1\\"}},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6'
+ '\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a DataFactory Compute
+@try_manual
+def step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute aks create '
+ '--compute-name "compute123" '
+ '--location "eastus" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create AKS Compute
+@try_manual
+def step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ return step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create an ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute aks create '
+ '--compute-name "compute123" '
+ '--location "eastus" '
+ '--ak-s-properties "{{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\"'
+ ':\\"personal\\",\\"personalComputeInstanceSettings\\":{{\\"assignedUser\\":{{\\"objectId\\":\\"00000000-0'
+ '000-0000-0000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}}}},\\"sshSetting'
+ 's\\":{{\\"sshPublicAccess\\":\\"Disabled\\"}},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"'
+ 'STANDARD_NC6\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create an ComputeInstance Compute with minimal inputs
+@try_manual
+def step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute aks create '
+ '--compute-name "compute123" '
+ '--location "eastus" '
+ '--ak-s-properties "{{\\"vmSize\\":\\"STANDARD_NC6\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get a AKS Compute
+@try_manual
+def step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute show '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get a AML Compute
+@try_manual
+def step_machine_learning_compute_show2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ return step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get an ComputeInstance
+@try_manual
+def step_machine_learning_compute_show3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ return step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks)
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get Computes
+@try_manual
+def step_machine_learning_compute_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/patch/Update a AmlCompute Compute
+@try_manual
+def step_machine_learning_compute_update(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute update '
+ '--compute-name "compute123" '
+ '--scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Get compute nodes information for a compute
+@try_manual
+def step_machine_learning_compute_list_node(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute list-node '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/List AKS Compute Keys
+@try_manual
+def step_machine_learning_compute_list_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute list-key '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Restart ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_restart(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute restart '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Start ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_start(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute start '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/post/Stop ComputeInstance Compute
+@try_manual
+def step_machine_learning_compute_stop(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute stop '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningCompute/delete/Delete Compute
+@try_manual
+def step_machine_learning_compute_delete(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-compute delete -y '
+ '--compute-name "compute123" '
+ '--resource-group "{rg_3}" '
+ '--underlying-resource-action "Delete" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningService/put/Create Or Update service
+@try_manual
+def step_machine_learning_service_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-service create '
+ '--properties "{{\\"appInsightsEnabled\\":true,\\"authEnabled\\":true,\\"computeType\\":\\"ACI\\",\\"conta'
+ 'inerResourceRequirements\\":{{\\"cpu\\":1,\\"memoryInGB\\":1}},\\"environmentImageRequest\\":{{\\"assets'
+ '\\":[{{\\"id\\":null,\\"mimeType\\":\\"application/x-python\\",\\"unpack\\":false,\\"url\\":\\"aml://stor'
+ 'age/azureml/score.py\\"}}],\\"driverProgram\\":\\"score.py\\",\\"environment\\":{{\\"name\\":\\"AzureML-S'
+ 'cikit-learn-0.20.3\\",\\"docker\\":{{\\"baseDockerfile\\":null,\\"baseImage\\":\\"mcr.microsoft.com/azure'
+ 'ml/base:openmpi3.1.2-ubuntu16.04\\",\\"baseImageRegistry\\":{{\\"address\\":null,\\"password\\":null,\\"u'
+ 'sername\\":null}}}},\\"environmentVariables\\":{{\\"EXAMPLE_ENV_VAR\\":\\"EXAMPLE_VALUE\\"}},\\"inferenci'
+ 'ngStackVersion\\":null,\\"python\\":{{\\"baseCondaEnvironment\\":null,\\"condaDependencies\\":{{\\"name\\'
+ '":\\"azureml_ae1acbe6e1e6aabbad900b53c491a17c\\",\\"channels\\":[\\"conda-forge\\"],\\"dependencies\\":['
+ '\\"python=3.6.2\\",{{\\"pip\\":[\\"azureml-core==1.0.69\\",\\"azureml-defaults==1.0.69\\",\\"azureml-tele'
+ 'metry==1.0.69\\",\\"azureml-train-restclients-hyperdrive==1.0.69\\",\\"azureml-train-core==1.0.69\\",\\"s'
+ 'cikit-learn==0.20.3\\",\\"scipy==1.2.1\\",\\"numpy==1.16.2\\",\\"joblib==0.13.2\\"]}}]}},\\"interpreterPa'
+ 'th\\":\\"python\\",\\"userManagedDependencies\\":false}},\\"spark\\":{{\\"packages\\":[],\\"precachePacka'
+ 'ges\\":true,\\"repositories\\":[]}},\\"version\\":\\"3\\"}},\\"models\\":[{{\\"name\\":\\"sklearn_regress'
+ 'ion_model.pkl\\",\\"mimeType\\":\\"application/x-python\\",\\"url\\":\\"aml://storage/azureml/sklearn_reg'
+ 'ression_model.pkl\\"}}]}},\\"location\\":\\"eastus2\\"}}" '
+ '--resource-group "{rg_3}" '
+ '--service-name "service456" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningService/get/Get Service
+@try_manual
+def step_machine_learning_service_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-service show '
+ '--resource-group "{rg_3}" '
+ '--service-name "service123" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningService/get/Get Services
+@try_manual
+def step_machine_learning_service_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-service list '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /MachineLearningService/delete/Delete Service
+@try_manual
+def step_machine_learning_service_delete(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices machine-learning-service delete -y '
+ '--resource-group "{rg_3}" '
+ '--service-name "service123" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Notebooks/post/List Workspace Keys
+@try_manual
+def step_notebook_list_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices notebook list-key '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Notebooks/post/Prepare Notebook
+@try_manual
+def step_notebook_prepare(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices notebook prepare '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/put/WorkspacePutPrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_put(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection put '
+ '--name "{myPrivateEndpointConnection}" '
+ '--private-link-service-connection-state description="Auto-Approved" status="Approved" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/get/WorkspaceGetPrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection show '
+ '--name "{myPrivateEndpointConnection}" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateEndpointConnections/delete/WorkspaceDeletePrivateEndpointConnection
+@try_manual
+def step_private_endpoint_connection_delete(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-endpoint-connection delete -y '
+ '--name "{myPrivateEndpointConnection}" '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /PrivateLinkResources/get/WorkspaceListPrivateLinkResources
+@try_manual
+def step_private_link_resource_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices private-link-resource list '
+ '--resource-group "{rg_6}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Quotas/get/List workspace quotas by VMFamily
+@try_manual
+def step_quota_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices quota list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /Quotas/post/update quotas
+@try_manual
+def step_quota_update(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices quota update '
+ '--location "eastus" '
+ '--value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/{subscription_id}/r'
+ 'esourceGroups/{rg_5}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace3}/quotas/{myQuot'
+ 'a}" limit=100 unit="Count" '
+ '--value type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/{subscription_id}/r'
+ 'esourceGroups/{rg_5}/providers/Microsoft.MachineLearningServices/workspaces/{myWorkspace4}/quotas/{myQuot'
+ 'a}" limit=200 unit="Count"',
+ checks=checks)
+
+
+# EXAMPLE: /StorageAccount/post/List Workspace Keys
+@try_manual
+def step_storage_account_list_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices storage-account list-key '
+ '--resource-group "{rg_3}" '
+ '--workspace-name "{myWorkspace2}"',
+ checks=checks)
+
+
+# EXAMPLE: /Usages/get/List Usages
+@try_manual
+def step_usage_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices usage list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /VirtualMachineSizes/get/List VM Sizes
+@try_manual
+def step_virtual_machine_size_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices virtual-machine-size list '
+ '--location "eastus"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspace/get/List Skus
+@try_manual
+def step_workspace_list_sku(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace list-sku',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/put/CreateWorkspaceConnection
+@try_manual
+def step_workspace_connection_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection create '
+ '--connection-name "connection-1" '
+ '--name "connection-1" '
+ '--auth-type "PAT" '
+ '--category "ACR" '
+ '--target "www.facebook.com" '
+ '--value "secrets" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/get/GetWorkspaceConnection
+@try_manual
+def step_workspace_connection_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection show '
+ '--connection-name "connection-1" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/get/ListWorkspaceConnections
+@try_manual
+def step_workspace_connection_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection list '
+ '--category "ACR" '
+ '--resource-group "{rg_7}" '
+ '--target "www.facebook.com" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceConnections/delete/DeleteWorkspaceConnection
+@try_manual
+def step_workspace_connection_delete(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-connection delete -y '
+ '--connection-name "connection-1" '
+ '--resource-group "{rg_7}" '
+ '--workspace-name "{myWorkspace5}"',
+ checks=checks)
+
+
+# EXAMPLE: /WorkspaceFeatures/get/List Workspace features
+@try_manual
+def step_workspace_feature_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace-feature list '
+ '--resource-group "{rg_4}" '
+ '--workspace-name "{myWorkspace}"',
+ checks=checks)
+
+
+# EXAMPLE: /Workspaces/delete/Delete Workspace
+@try_manual
+def step_workspace_delete(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=None):
+ if checks is None:
+ checks = []
+ test.cmd('az machinelearningservices workspace delete -y '
+ '--resource-group "{rg}" '
+ '--name "{myWorkspace}"',
+ checks=checks)
+
diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py
new file mode 100644
index 00000000000..0daa24e6f12
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py
@@ -0,0 +1,284 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+# pylint: disable=line-too-long
+
+import os
+from azure.cli.testsdk import ScenarioTest
+from azure.cli.testsdk import ResourceGroupPreparer
+from azure.cli.testsdk import StorageAccountPreparer
+from .example_steps import step_workspace_create
+from .example_steps import step_workspace_show
+from .example_steps import step_workspace_list
+from .example_steps import step_workspace_list2
+from .example_steps import step_workspace_update
+from .example_steps import step_workspace_list_key
+from .example_steps import step_workspace_resync_key
+from .example_steps import step_machine_learning_compute_aks_create
+from .example_steps import step_machine_learning_compute_aks_create2
+from .example_steps import step_machine_learning_compute_aks_create3
+from .example_steps import step_machine_learning_compute_aks_create4
+from .example_steps import step_machine_learning_compute_aks_create5
+from .example_steps import step_machine_learning_compute_show
+from .example_steps import step_machine_learning_compute_show2
+from .example_steps import step_machine_learning_compute_show3
+from .example_steps import step_machine_learning_compute_list
+from .example_steps import step_machine_learning_compute_update
+from .example_steps import step_machine_learning_compute_list_node
+from .example_steps import step_machine_learning_compute_list_key
+from .example_steps import step_machine_learning_compute_restart
+from .example_steps import step_machine_learning_compute_start
+from .example_steps import step_machine_learning_compute_stop
+from .example_steps import step_machine_learning_compute_delete
+from .example_steps import step_machine_learning_service_create
+from .example_steps import step_machine_learning_service_show
+from .example_steps import step_machine_learning_service_list
+from .example_steps import step_machine_learning_service_delete
+from .example_steps import step_notebook_list_key
+from .example_steps import step_notebook_prepare
+from .example_steps import step_private_endpoint_connection_put
+from .example_steps import step_private_endpoint_connection_show
+from .example_steps import step_private_endpoint_connection_delete
+from .example_steps import step_private_link_resource_list
+from .example_steps import step_quota_list
+from .example_steps import step_quota_update
+from .example_steps import step_storage_account_list_key
+from .example_steps import step_usage_list
+from .example_steps import step_virtual_machine_size_list
+from .example_steps import step_workspace_list_sku
+from .example_steps import step_workspace_connection_create
+from .example_steps import step_workspace_connection_show
+from .example_steps import step_workspace_connection_list
+from .example_steps import step_workspace_connection_delete
+from .example_steps import step_workspace_feature_list
+from .example_steps import step_workspace_delete
+from .. import (
+ try_manual,
+ raise_if,
+ calc_coverage
+)
+
+
+TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..'))
+
+
+# Env setup_scenario
+@try_manual
+def setup_scenario(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7):
+ pass
+
+
+# Env cleanup_scenario
+@try_manual
+def cleanup_scenario(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7):
+ pass
+
+
+# Testcase: Scenario
+@try_manual
+def call_scenario(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7):
+ setup_scenario(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7)
+ step_workspace_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[
+ test.check("encryption.identity.userAssignedIdentity", "/subscriptions/{subscription_id}/resourceGroups/{rg}/pr"
+ "oviders/Microsoft.ManagedIdentity/userAssignedIdentities/testuai", case_sensitive=False),
+ test.check("location", "eastus2euap", case_sensitive=False),
+ test.check("description", "test description", case_sensitive=False),
+ test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+ "ights/components/testinsights", case_sensitive=False),
+ test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+ "inerRegistry/registries/testRegistry", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey/aabbccdd"
+ "ee112233445566778899aabb", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGroups/{rg}"
+ "/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False),
+ test.check("encryption.status", "Enabled", case_sensitive=False),
+ test.check("friendlyName", "HelloName", case_sensitive=False),
+ test.check("hbiWorkspace", False),
+ test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+ "s/testkv", case_sensitive=False),
+ test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+ "e/storageAccounts/{sa}", case_sensitive=False),
+ test.check("name", "{myWorkspace}", case_sensitive=False),
+ ])
+ step_workspace_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[
+ test.check("encryption.identity.userAssignedIdentity", "/subscriptions/{subscription_id}/resourceGroups/{rg}/pr"
+ "oviders/Microsoft.ManagedIdentity/userAssignedIdentities/testuai", case_sensitive=False),
+ test.check("location", "eastus2euap", case_sensitive=False),
+ test.check("description", "test description", case_sensitive=False),
+ test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+ "ights/components/testinsights", case_sensitive=False),
+ test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+ "inerRegistry/registries/testRegistry", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey/aabbccdd"
+ "ee112233445566778899aabb", case_sensitive=False),
+ test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGroups/{rg}"
+ "/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False),
+ test.check("encryption.status", "Enabled", case_sensitive=False),
+ test.check("friendlyName", "HelloName", case_sensitive=False),
+ test.check("hbiWorkspace", False),
+ test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+ "s/testkv", case_sensitive=False),
+ test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+ "e/storageAccounts/{sa}", case_sensitive=False),
+ test.check("name", "{myWorkspace}", case_sensitive=False),
+ ])
+ step_workspace_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[
+ test.check('length(@)', 1),
+ ])
+ step_workspace_list2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[
+ test.check('length(@)', 2),
+ ])
+ step_workspace_update(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[
+ test.check("location", "eastus2euap", case_sensitive=False),
+ test.check("description", "new description", case_sensitive=False),
+ test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.ins"
+ "ights/components/testinsights", case_sensitive=False),
+ test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.Conta"
+ "inerRegistry/registries/testRegistry", case_sensitive=False),
+ test.check("friendlyName", "New friendly name", case_sensitive=False),
+ test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vault"
+ "s/testkv", case_sensitive=False),
+ test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storag"
+ "e/storageAccounts/{sa}", case_sensitive=False),
+ test.check("name", "{myWorkspace}", case_sensitive=False),
+ ])
+ step_workspace_list_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_workspace_list_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_workspace_resync_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create4(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_aks_create5(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_show2(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_show3(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_update(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_list_node(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_list_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_restart(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_start(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_stop(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_compute_delete(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_service_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_service_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_service_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_machine_learning_service_delete(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_notebook_list_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_notebook_prepare(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_private_endpoint_connection_put(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_private_endpoint_connection_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[
+ test.check("name", "{myPrivateEndpointConnection}", case_sensitive=False),
+ ])
+ step_private_endpoint_connection_delete(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_private_link_resource_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_quota_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_quota_update(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_storage_account_list_key(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_usage_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_virtual_machine_size_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_workspace_list_sku(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_workspace_connection_create(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_workspace_connection_show(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_workspace_connection_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_workspace_connection_delete(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_workspace_feature_list(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ step_workspace_delete(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7, checks=[])
+ cleanup_scenario(test, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7)
+
+
+# Test class for Scenario
+@try_manual
+class MachinelearningservicesScenarioTest(ScenarioTest):
+
+ def __init__(self, *args, **kwargs):
+ super(MachinelearningservicesScenarioTest, self).__init__(*args, **kwargs)
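+        # Values registered in self.kwargs are substituted into the '{...}'
+        # placeholders used by the test.cmd strings and checks in example_steps.py.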
+ self.kwargs.update({
+ 'subscription_id': self.get_subscription_id()
+ })
+
+ self.kwargs.update({
+ 'myStorageAccount2': 'default',
+ 'myWorkspace6': 'default',
+ 'myPrivateLinkResource2': 'default',
+ 'myWorkspace3': 'demo_workspace1',
+ 'myWorkspace4': 'demo_workspace2',
+ 'myWorkspace': 'testworkspace',
+ 'myWorkspace2': 'workspaces123',
+ 'myWorkspace5': 'workspace-1',
+ 'myQuota': 'Standard_DSv2_Family_Cluster_Dedicated_vCPUs',
+ 'myPrivateEndpointConnection': '{privateEndpointConnectionName}',
+ 'myPrivateLinkResource': 'Sql',
+ 'myStorageAccount': '/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storageAccounts/testStorageAccount',
+ })
+
+
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_workspace-1234'[:7], key='rg',
+ parameter_name='rg')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_accountcrud-1234'[:7], key='rg_2',
+ parameter_name='rg_2')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg'[:7], key='rg_5', parameter_name='rg_5')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_myResourceGroup'[:7], key='rg_4',
+ parameter_name='rg_4')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_testrg123'[:7], key='rg_3',
+ parameter_name='rg_3')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg-1234'[:7], key='rg_6',
+ parameter_name='rg_6')
+ @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_resourceGroup-1'[:7], key='rg_7',
+ parameter_name='rg_7')
+ @StorageAccountPreparer(name_prefix='clitestmachinelearningservices_testStorageAccount'[:7], key='sa',
+ resource_group_parameter_name='rg_2')
+ def test_machinelearningservices_Scenario(self, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7):
+ call_scenario(self, rg, rg_2, rg_5, rg_4, rg_3, rg_6, rg_7)
+ calc_coverage(__file__)
+ raise_if()
+
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py
new file mode 100644
index 00000000000..c9cfdc73e77
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py
@@ -0,0 +1,12 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is
+# regenerated.
+# --------------------------------------------------------------------------
+
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py
new file mode 100644
index 00000000000..dad2c6eeb01
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py
@@ -0,0 +1,16 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
+
+try:
+ from ._patch import patch_sdk # type: ignore
+ patch_sdk()
+except ImportError:
+ pass
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000000..2da55795f43
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py
@@ -0,0 +1,134 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.mgmt.core import ARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Optional
+
+ from azure.core.credentials import TokenCredential
+
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import Operations
+from .operations import WorkspacesOperations
+from .operations import WorkspaceFeaturesOperations
+from .operations import UsagesOperations
+from .operations import VirtualMachineSizesOperations
+from .operations import QuotasOperations
+from .operations import MachineLearningComputeOperations
+from .operations import WorkspaceOperations
+from .operations import PrivateEndpointConnectionsOperations
+from .operations import PrivateLinkResourcesOperations
+from .operations import MachineLearningServiceOperations
+from .operations import NotebooksOperations
+from .operations import StorageAccountOperations
+from .operations import WorkspaceConnectionsOperations
+from . import models
+
+
+class AzureMachineLearningWorkspaces(object):
+ """These APIs allow end users to operate on Azure Machine Learning Workspace resources.
+
+ :ivar operations: Operations operations
+ :vartype operations: azure_machine_learning_workspaces.operations.Operations
+ :ivar workspaces: WorkspacesOperations operations
+ :vartype workspaces: azure_machine_learning_workspaces.operations.WorkspacesOperations
+ :ivar workspace_features: WorkspaceFeaturesOperations operations
+ :vartype workspace_features: azure_machine_learning_workspaces.operations.WorkspaceFeaturesOperations
+ :ivar usages: UsagesOperations operations
+ :vartype usages: azure_machine_learning_workspaces.operations.UsagesOperations
+ :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
+ :vartype virtual_machine_sizes: azure_machine_learning_workspaces.operations.VirtualMachineSizesOperations
+ :ivar quotas: QuotasOperations operations
+ :vartype quotas: azure_machine_learning_workspaces.operations.QuotasOperations
+ :ivar machine_learning_compute: MachineLearningComputeOperations operations
+ :vartype machine_learning_compute: azure_machine_learning_workspaces.operations.MachineLearningComputeOperations
+ :ivar workspace: WorkspaceOperations operations
+ :vartype workspace: azure_machine_learning_workspaces.operations.WorkspaceOperations
+ :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
+ :vartype private_endpoint_connections: azure_machine_learning_workspaces.operations.PrivateEndpointConnectionsOperations
+ :ivar private_link_resources: PrivateLinkResourcesOperations operations
+ :vartype private_link_resources: azure_machine_learning_workspaces.operations.PrivateLinkResourcesOperations
+ :ivar machine_learning_service: MachineLearningServiceOperations operations
+ :vartype machine_learning_service: azure_machine_learning_workspaces.operations.MachineLearningServiceOperations
+ :ivar notebooks: NotebooksOperations operations
+ :vartype notebooks: azure_machine_learning_workspaces.operations.NotebooksOperations
+ :ivar storage_account: StorageAccountOperations operations
+ :vartype storage_account: azure_machine_learning_workspaces.operations.StorageAccountOperations
+ :ivar workspace_connections: WorkspaceConnectionsOperations operations
+ :vartype workspace_connections: azure_machine_learning_workspaces.operations.WorkspaceConnectionsOperations
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials.TokenCredential
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ :param str base_url: Service URL
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ """
+
+ def __init__(
+ self,
+ credential, # type: "TokenCredential"
+ subscription_id, # type: str
+ base_url=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ if not base_url:
+ base_url = 'https://management.azure.com'
+ self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
+ self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+
+ self.operations = Operations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspaces = WorkspacesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_features = WorkspaceFeaturesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.usages = UsagesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.virtual_machine_sizes = VirtualMachineSizesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.quotas = QuotasOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.machine_learning_compute = MachineLearningComputeOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace = WorkspaceOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_link_resources = PrivateLinkResourcesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.machine_learning_service = MachineLearningServiceOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.notebooks = NotebooksOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.storage_account = StorageAccountOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_connections = WorkspaceConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ def close(self):
+ # type: () -> None
+ self._client.close()
+
+ def __enter__(self):
+ # type: () -> AzureMachineLearningWorkspaces
+ self._client.__enter__()
+ return self
+
+ def __exit__(self, *exc_details):
+ # type: (Any) -> None
+ self._client.__exit__(*exc_details)
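+
+# Illustrative usage sketch, not part of the generated code: one way this synchronous
+# client might be constructed and used. ``DefaultAzureCredential`` comes from the
+# separate ``azure-identity`` package, the names are placeholders, and the synchronous
+# compute operation group is assumed to mirror the async one shown later in this diff.
+#
+#   from azure.identity import DefaultAzureCredential
+#
+#   with AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>") as client:
+#       for compute in client.machine_learning_compute.list_by_workspace(
+#               "<resource-group>", "<workspace-name>"):
+#           print(compute.name)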
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py
new file mode 100644
index 00000000000..eee6e6c30ef
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py
@@ -0,0 +1,70 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any
+
+ from azure.core.credentials import TokenCredential
+
+VERSION = "unknown"
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+ """Configuration for AzureMachineLearningWorkspaces.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials.TokenCredential
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ """
+
+ def __init__(
+ self,
+ credential, # type: "TokenCredential"
+ subscription_id, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ if credential is None:
+ raise ValueError("Parameter 'credential' must not be None.")
+ if subscription_id is None:
+ raise ValueError("Parameter 'subscription_id' must not be None.")
+ super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+
+ self.credential = credential
+ self.subscription_id = subscription_id
+ self.api_version = "2021-04-01"
+ self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+ kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get('authentication_policy')
+ if self.credential and not self.authentication_policy:
+ self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
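+
+# Illustrative sketch, not part of the generated code: each policy above is resolved
+# from keyword arguments, so callers can override them by passing keywords through the
+# client constructor, which forwards **kwargs to this configuration. The values below
+# are example overrides, not defaults.
+#
+#   from azure.core.pipeline.policies import RetryPolicy
+#
+#   client = AzureMachineLearningWorkspaces(
+#       credential,
+#       subscription_id,
+#       credential_scopes=["https://management.azure.com/.default"],
+#       retry_policy=RetryPolicy(retry_total=3),
+#   )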
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py
new file mode 100644
index 00000000000..872474577c4
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py
@@ -0,0 +1,10 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces
+__all__ = ['AzureMachineLearningWorkspaces']
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py
new file mode 100644
index 00000000000..6e8f7614dd5
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces.py
@@ -0,0 +1,128 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Optional, TYPE_CHECKING
+
+from azure.mgmt.core import AsyncARMPipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+from ._configuration import AzureMachineLearningWorkspacesConfiguration
+from .operations import Operations
+from .operations import WorkspacesOperations
+from .operations import WorkspaceFeaturesOperations
+from .operations import UsagesOperations
+from .operations import VirtualMachineSizesOperations
+from .operations import QuotasOperations
+from .operations import MachineLearningComputeOperations
+from .operations import WorkspaceOperations
+from .operations import PrivateEndpointConnectionsOperations
+from .operations import PrivateLinkResourcesOperations
+from .operations import MachineLearningServiceOperations
+from .operations import NotebooksOperations
+from .operations import StorageAccountOperations
+from .operations import WorkspaceConnectionsOperations
+from .. import models
+
+
+class AzureMachineLearningWorkspaces(object):
+ """These APIs allow end users to operate on Azure Machine Learning Workspace resources.
+
+ :ivar operations: Operations operations
+ :vartype operations: azure_machine_learning_workspaces.aio.operations.Operations
+ :ivar workspaces: WorkspacesOperations operations
+ :vartype workspaces: azure_machine_learning_workspaces.aio.operations.WorkspacesOperations
+ :ivar workspace_features: WorkspaceFeaturesOperations operations
+ :vartype workspace_features: azure_machine_learning_workspaces.aio.operations.WorkspaceFeaturesOperations
+ :ivar usages: UsagesOperations operations
+ :vartype usages: azure_machine_learning_workspaces.aio.operations.UsagesOperations
+ :ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
+ :vartype virtual_machine_sizes: azure_machine_learning_workspaces.aio.operations.VirtualMachineSizesOperations
+ :ivar quotas: QuotasOperations operations
+ :vartype quotas: azure_machine_learning_workspaces.aio.operations.QuotasOperations
+ :ivar machine_learning_compute: MachineLearningComputeOperations operations
+ :vartype machine_learning_compute: azure_machine_learning_workspaces.aio.operations.MachineLearningComputeOperations
+ :ivar workspace: WorkspaceOperations operations
+ :vartype workspace: azure_machine_learning_workspaces.aio.operations.WorkspaceOperations
+ :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations
+ :vartype private_endpoint_connections: azure_machine_learning_workspaces.aio.operations.PrivateEndpointConnectionsOperations
+ :ivar private_link_resources: PrivateLinkResourcesOperations operations
+ :vartype private_link_resources: azure_machine_learning_workspaces.aio.operations.PrivateLinkResourcesOperations
+ :ivar machine_learning_service: MachineLearningServiceOperations operations
+ :vartype machine_learning_service: azure_machine_learning_workspaces.aio.operations.MachineLearningServiceOperations
+ :ivar notebooks: NotebooksOperations operations
+ :vartype notebooks: azure_machine_learning_workspaces.aio.operations.NotebooksOperations
+ :ivar storage_account: StorageAccountOperations operations
+ :vartype storage_account: azure_machine_learning_workspaces.aio.operations.StorageAccountOperations
+ :ivar workspace_connections: WorkspaceConnectionsOperations operations
+ :vartype workspace_connections: azure_machine_learning_workspaces.aio.operations.WorkspaceConnectionsOperations
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ :param str base_url: Service URL
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ """
+
+ def __init__(
+ self,
+ credential: "AsyncTokenCredential",
+ subscription_id: str,
+ base_url: Optional[str] = None,
+ **kwargs: Any
+ ) -> None:
+ if not base_url:
+ base_url = 'https://management.azure.com'
+ self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs)
+ self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+ client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+ self._serialize = Serializer(client_models)
+ self._deserialize = Deserializer(client_models)
+
+ self.operations = Operations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspaces = WorkspacesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_features = WorkspaceFeaturesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.usages = UsagesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.virtual_machine_sizes = VirtualMachineSizesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.quotas = QuotasOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.machine_learning_compute = MachineLearningComputeOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace = WorkspaceOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_endpoint_connections = PrivateEndpointConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.private_link_resources = PrivateLinkResourcesOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.machine_learning_service = MachineLearningServiceOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.notebooks = NotebooksOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.storage_account = StorageAccountOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+ self.workspace_connections = WorkspaceConnectionsOperations(
+ self._client, self._config, self._serialize, self._deserialize)
+
+ async def close(self) -> None:
+ await self._client.close()
+
+ async def __aenter__(self) -> "AzureMachineLearningWorkspaces":
+ await self._client.__aenter__()
+ return self
+
+ async def __aexit__(self, *exc_details) -> None:
+ await self._client.__aexit__(*exc_details)
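+
+# Illustrative usage sketch, not part of the generated code: the async client is used
+# as an async context manager. ``DefaultAzureCredential`` comes from the separate
+# ``azure-identity`` package; the subscription, resource group and compute names are
+# placeholders.
+#
+#   import asyncio
+#   from azure.identity.aio import DefaultAzureCredential
+#
+#   async def main():
+#       async with AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>") as client:
+#           compute = await client.machine_learning_compute.get(
+#               "<resource-group>", "<workspace-name>", "<compute-name>")
+#           print(compute.id)
+#
+#   asyncio.run(main())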
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py
new file mode 100644
index 00000000000..51c8cdda64b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+from azure.mgmt.core.policies import ARMHttpLoggingPolicy
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from azure.core.credentials_async import AsyncTokenCredential
+
+VERSION = "unknown"
+
+class AzureMachineLearningWorkspacesConfiguration(Configuration):
+ """Configuration for AzureMachineLearningWorkspaces.
+
+ Note that all parameters used to create this instance are saved as instance
+ attributes.
+
+ :param credential: Credential needed for the client to connect to Azure.
+ :type credential: ~azure.core.credentials_async.AsyncTokenCredential
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ """
+
+ def __init__(
+ self,
+ credential: "AsyncTokenCredential",
+ subscription_id: str,
+ **kwargs: Any
+ ) -> None:
+ if credential is None:
+ raise ValueError("Parameter 'credential' must not be None.")
+ if subscription_id is None:
+ raise ValueError("Parameter 'subscription_id' must not be None.")
+ super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs)
+
+ self.credential = credential
+ self.subscription_id = subscription_id
+ self.api_version = "2021-04-01"
+ self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
+ kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION))
+ self._configure(**kwargs)
+
+ def _configure(
+ self,
+ **kwargs: Any
+ ) -> None:
+ self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+ self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+ self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+ self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+ self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
+ self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+ self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+ self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+ self.authentication_policy = kwargs.get('authentication_policy')
+ if self.credential and not self.authentication_policy:
+ self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py
new file mode 100644
index 00000000000..7dc21ac7c33
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/__init__.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import Operations
+from ._workspaces_operations import WorkspacesOperations
+from ._workspace_features_operations import WorkspaceFeaturesOperations
+from ._usages_operations import UsagesOperations
+from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations
+from ._quotas_operations import QuotasOperations
+from ._machine_learning_compute_operations import MachineLearningComputeOperations
+from ._workspace_operations import WorkspaceOperations
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
+from ._private_link_resources_operations import PrivateLinkResourcesOperations
+from ._machine_learning_service_operations import MachineLearningServiceOperations
+from ._notebooks_operations import NotebooksOperations
+from ._storage_account_operations import StorageAccountOperations
+from ._workspace_connections_operations import WorkspaceConnectionsOperations
+
+__all__ = [
+ 'Operations',
+ 'WorkspacesOperations',
+ 'WorkspaceFeaturesOperations',
+ 'UsagesOperations',
+ 'VirtualMachineSizesOperations',
+ 'QuotasOperations',
+ 'MachineLearningComputeOperations',
+ 'WorkspaceOperations',
+ 'PrivateEndpointConnectionsOperations',
+ 'PrivateLinkResourcesOperations',
+ 'MachineLearningServiceOperations',
+ 'NotebooksOperations',
+ 'StorageAccountOperations',
+ 'WorkspaceConnectionsOperations',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_compute_operations.py
new file mode 100644
index 00000000000..95500780057
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_compute_operations.py
@@ -0,0 +1,1026 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class MachineLearningComputeOperations:
+ """MachineLearningComputeOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_workspace(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ skip: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.PaginatedComputeResourcesList"]:
+ """Gets computes in specified workspace.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of the Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedComputeResourcesList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore
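+
+ # Illustrative call sketch, not part of the generated code: the returned pager is
+ # consumed with ``async for``; names are placeholders.
+ #
+ #   async for compute in client.machine_learning_compute.list_by_workspace(
+ #           "<resource-group>", "<workspace-name>"):
+ #       print(compute.id)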
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> "models.ComputeResource":
+ """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are
+ not returned - use 'keys' nested resource to get them.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of the Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ComputeResource",
+ **kwargs
+ ) -> "models.ComputeResource":
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ComputeResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 200:
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ComputeResource",
+ **kwargs
+ ) -> AsyncLROPoller["models.ComputeResource"]:
+ """Creates or updates compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify
+ that it does not exist yet.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of the Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Payload with the Machine Learning compute definition.
+ :type parameters: ~azure_machine_learning_workspaces.models.ComputeResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ response_headers = {}
+ response = pipeline_response.http_response
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
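+
+ # Illustrative call sketch, not part of the generated code: the method returns an
+ # AsyncLROPoller, so the caller awaits the call and then the poller's result. The
+ # ``compute_resource`` value is an assumed, caller-built models.ComputeResource.
+ #
+ #   poller = await client.machine_learning_compute.begin_create_or_update(
+ #       "<resource-group>", "<workspace-name>", "<compute-name>", compute_resource)
+ #   compute = await poller.result()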
+
+ async def _update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ClusterUpdateParameters",
+ **kwargs
+ ) -> "models.ComputeResource":
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def begin_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ parameters: "models.ClusterUpdateParameters",
+ **kwargs
+ ) -> AsyncLROPoller["models.ComputeResource"]:
+ """Updates properties of a compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of the Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Additional parameters for cluster update.
+ :type parameters: ~azure_machine_learning_workspaces.models.ClusterUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ underlying_resource_action: Union[str, "models.UnderlyingResourceAction"],
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ async def begin_delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ underlying_resource_action: Union[str, "models.UnderlyingResourceAction"],
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Deletes specified Machine Learning compute.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of the Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the
+ underlying compute from the workspace if 'Detach'.
+ :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ underlying_resource_action=underlying_resource_action,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
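+
+ # Illustrative call sketch, not part of the generated code: ``underlying_resource_action``
+ # chooses between deleting the underlying compute ("Delete") and only detaching it from
+ # the workspace ("Detach"); names are placeholders.
+ #
+ #   poller = await client.machine_learning_compute.begin_delete(
+ #       "<resource-group>", "<workspace-name>", "<compute-name>",
+ #       underlying_resource_action="Detach")
+ #   await poller.result()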
+
+ def list_nodes(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> AsyncIterable["models.AmlComputeNodesInformation"]:
+ """Get the details (e.g IP address, port etc) of all the compute nodes in the compute.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of the Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either AmlComputeNodesInformation or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.AmlComputeNodesInformation]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.AmlComputeNodesInformation"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_nodes.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response)
+ list_of_elem = deserialized.nodes
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore
+
+ async def list_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> "models.ComputeSecrets":
+ """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc).
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of the Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeSecrets, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeSecrets"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeSecrets', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore
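+
+ # Illustrative call sketch, not part of the generated code: unlike ``get``, this call
+ # does return secrets; the concrete fields depend on the compute type, so only the
+ # returned model is bound here. Names are placeholders.
+ #
+ #   secrets = await client.machine_learning_compute.list_keys(
+ #       "<resource-group>", "<workspace-name>", "<compute-name>")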
+
+ async def _start_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._start_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ async def begin_start(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Posts a start action to a compute instance.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of the Azure Machine Learning workspace.
+        :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._start_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ async def _stop_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._stop_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ async def begin_stop(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Posts a stop action to a compute instance.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of the Azure Machine Learning workspace.
+        :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._stop_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ async def restart(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ compute_name: str,
+ **kwargs
+ ) -> None:
+ """Posts a restart action to a compute instance.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of the Azure Machine Learning workspace.
+        :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.restart.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
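The async compute operations above expose `begin_start`/`begin_stop` as long-running pollers, plus plain awaitable `restart` and `list_keys` calls. Below is a minimal usage sketch; it assumes the vendored async client is importable as `AzureMachineLearningWorkspaces` from the package's `aio` module and attaches this operation group as `machine_learning_compute` — both names are assumptions for illustration, not confirmed by this diff.
```
# Hypothetical usage sketch. The client class name, import path and the
# "machine_learning_compute" attribute name are assumptions for illustration.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,  # assumed client class name
)


async def cycle_compute(subscription_id: str) -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningWorkspaces(credential, subscription_id) as client:
            # begin_start/begin_stop return AsyncLROPoller[None]; await .result()
            # to block until the 202-accepted operation finishes.
            poller = await client.machine_learning_compute.begin_start(
                resource_group_name="workspace-1234",
                workspace_name="testworkspace",
                compute_name="testcompute",
            )
            await poller.result()

            # restart and list_keys are plain awaitable POSTs (HTTP 200).
            await client.machine_learning_compute.restart(
                resource_group_name="workspace-1234",
                workspace_name="testworkspace",
                compute_name="testcompute",
            )
            secrets = await client.machine_learning_compute.list_keys(
                resource_group_name="workspace-1234",
                workspace_name="testworkspace",
                compute_name="testcompute",
            )
            print(type(secrets).__name__)  # a ComputeSecrets subtype


asyncio.run(cycle_compute("00000000-1111-2222-3333-444444444444"))
```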
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_service_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_service_operations.py
new file mode 100644
index 00000000000..02ebc8c5835
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_machine_learning_service_operations.py
@@ -0,0 +1,435 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class MachineLearningServiceOperations:
+ """MachineLearningServiceOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_workspace(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ skip: Optional[str] = None,
+ model_id: Optional[str] = None,
+ model_name: Optional[str] = None,
+ tag: Optional[str] = None,
+ tags: Optional[str] = None,
+ properties: Optional[str] = None,
+ run_id: Optional[str] = None,
+ expand: Optional[bool] = None,
+ orderby: Optional[Union[str, "models.OrderString"]] = "UpdatedAtDesc",
+ **kwargs
+ ) -> AsyncIterable["models.PaginatedServiceList"]:
+ """Gets services in specified workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :param model_id: The Model Id.
+ :type model_id: str
+ :param model_name: The Model name.
+ :type model_name: str
+ :param tag: The object tag.
+ :type tag: str
+        :param tags: A set of tags with which to filter the returned services. It is a comma-separated
+         string of tag keys or key=value pairs, for example: tagKey1,tagKey2,tagKey3=value3.
+        :type tags: str
+        :param properties: A set of properties with which to filter the returned services. It is a
+         comma-separated string of property keys and/or key=value pairs, for example:
+         propKey1,propKey2,propKey3=value3.
+        :type properties: str
+        :param run_id: The run ID of the model associated with the service.
+        :type run_id: str
+ :param expand: Set to True to include Model details.
+ :type expand: bool
+ :param orderby: The option to order the response.
+ :type orderby: str or ~azure_machine_learning_workspaces.models.OrderString
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedServiceList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedServiceList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedServiceList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+ if model_id is not None:
+ query_parameters['modelId'] = self._serialize.query("model_id", model_id, 'str')
+ if model_name is not None:
+ query_parameters['modelName'] = self._serialize.query("model_name", model_name, 'str')
+ if tag is not None:
+ query_parameters['tag'] = self._serialize.query("tag", tag, 'str')
+ if tags is not None:
+ query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+ if properties is not None:
+ query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
+ if run_id is not None:
+ query_parameters['runId'] = self._serialize.query("run_id", run_id, 'str')
+ if expand is not None:
+ query_parameters['expand'] = self._serialize.query("expand", expand, 'bool')
+ if orderby is not None:
+ query_parameters['orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedServiceList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services'} # type: ignore
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ service_name: str,
+ expand: Optional[bool] = False,
+ **kwargs
+ ) -> "models.ServiceResource":
+ """Get a Service by name.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of the Azure Machine Learning workspace.
+        :type workspace_name: str
+ :param service_name: Name of the Azure Machine Learning service.
+ :type service_name: str
+ :param expand: Set to True to include Model details.
+ :type expand: bool
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ServiceResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ServiceResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ServiceResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if expand is not None:
+ query_parameters['expand'] = self._serialize.query("expand", expand, 'bool')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+ async def delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ service_name: str,
+ **kwargs
+ ) -> None:
+ """Delete a specific Service..
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param service_name: Name of the Azure Machine Learning service.
+ :type service_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ service_name: str,
+ properties: "models.CreateServiceRequest",
+ **kwargs
+ ) -> Optional["models.ServiceResource"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ServiceResource"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(properties, 'CreateServiceRequest')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ service_name: str,
+ properties: "models.CreateServiceRequest",
+ **kwargs
+ ) -> AsyncLROPoller["models.ServiceResource"]:
+ """Creates or updates service. This call will update a service if it exists. This is a
+ nonrecoverable operation. If your intent is to create a new service, do a GET first to verify
+ that it does not exist yet.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param service_name: Name of the Azure Machine Learning service.
+ :type service_name: str
+ :param properties: The payload that is used to create or update the Service.
+ :type properties: ~azure_machine_learning_workspaces.models.CreateServiceRequest
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either ServiceResource or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ServiceResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ServiceResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ service_name=service_name,
+ properties=properties,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
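For the service operations above, `list_by_workspace` returns an `AsyncItemPaged` that is iterated with `async for`, while `get` and `delete` are plain awaitables and `begin_create_or_update` returns a poller. A minimal sketch follows; `client` is assumed to be the generated async client with this group attached as `machine_learning_service` (attribute name assumed, not shown in this diff).
```
# Hypothetical usage sketch; "client" is the generated async client and the
# "machine_learning_service" attribute name is an assumption for illustration.
async def show_services(client, resource_group_name: str, workspace_name: str):
    # list_by_workspace is called without "await": it returns an AsyncItemPaged
    # that fetches pages lazily while being consumed with "async for".
    async for service in client.machine_learning_service.list_by_workspace(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        expand=False,
        orderby="UpdatedAtDesc",
    ):
        print(service)

    # get returns a single ServiceResource, or raises HttpResponseError on failure.
    return await client.machine_learning_service.get(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        service_name="myservice",
        expand=True,
    )
```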
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_notebooks_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_notebooks_operations.py
new file mode 100644
index 00000000000..1722e37fb96
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_notebooks_operations.py
@@ -0,0 +1,219 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class NotebooksOperations:
+ """NotebooksOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def _prepare_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> Optional["models.NotebookResourceInfo"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResourceInfo"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._prepare_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ async def begin_prepare(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller["models.NotebookResourceInfo"]:
+ """prepare.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either NotebookResourceInfo or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResourceInfo"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._prepare_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ async def list_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListNotebookKeysResult":
+ """list_keys.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListNotebookKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListNotebookKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys'} # type: ignore
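The notebooks group pairs a long-running `begin_prepare` (resolving to `NotebookResourceInfo`) with a plain awaitable `list_keys`. A minimal sketch, assuming the group is attached to the generated async client as `notebooks` (attribute name assumed):
```
# Hypothetical usage sketch; the "notebooks" attribute name is an assumption.
async def prepare_notebook_and_keys(client, resource_group_name: str, workspace_name: str):
    # begin_prepare returns AsyncLROPoller[NotebookResourceInfo]; the poller uses
    # location-based final state, so await .result() for the deserialized payload.
    poller = await client.notebooks.begin_prepare(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
    )
    notebook_info = await poller.result()

    # list_keys is a plain awaitable POST returning ListNotebookKeysResult.
    keys = await client.notebooks.list_keys(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
    )
    return notebook_info, keys
```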
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py
new file mode 100644
index 00000000000..e8808d62e42
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_operations.py
@@ -0,0 +1,105 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class Operations:
+ """Operations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ **kwargs
+ ) -> AsyncIterable["models.OperationListResult"]:
+ """Lists all of the available Azure Machine Learning Workspaces REST API operations.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either OperationListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.OperationListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('OperationListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore
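The `Operations.list` call above is synchronous to invoke but asynchronous to consume: it builds an `AsyncItemPaged` whose pages are fetched only while iterating. A minimal sketch, assuming the group is exposed on the generated async client as `operations` (attribute name assumed):
```
# Hypothetical usage sketch; the "operations" attribute name is an assumption.
async def print_provider_operations(client) -> None:
    # list() is not awaited; it returns an AsyncItemPaged consumed with "async for".
    async for operation in client.operations.list():
        print(operation)
```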
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py
new file mode 100644
index 00000000000..5a29c67ab88
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_endpoint_connections_operations.py
@@ -0,0 +1,238 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateEndpointConnectionsOperations:
+ """PrivateEndpointConnectionsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ **kwargs
+ ) -> "models.PrivateEndpointConnection":
+ """Gets the specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of the Azure Machine Learning workspace.
+        :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ async def put(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ properties: "models.PrivateEndpointConnection",
+ **kwargs
+ ) -> "models.PrivateEndpointConnection":
+ """Update the state of specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :param properties: The private endpoint connection properties.
+ :type properties: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.put.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ async def delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ **kwargs
+ ) -> None:
+ """Deletes the specified private endpoint connection associated with the workspace.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of the Azure Machine Learning workspace.
+        :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
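A hedged usage sketch for the private endpoint connection operations above: approving a connection via `put`, then removing it via `delete`. The client attribute name `private_endpoint_connections`, the keyword arguments of `put`, and the `PrivateLinkServiceConnectionState` model with its fields are assumptions layered on top of this vendored SDK, not facts from this file; `client` is an already-constructed async client (see the construction sketch further below).
```
# Hedged sketch (not generated code): approve, then remove, a private endpoint connection.
# Assumptions: client.private_endpoint_connections is the operation-group attribute, and
# PrivateLinkServiceConnectionState exists with status/description fields.
from azext_machinelearningservices.vendored_sdks.machinelearningservices import models


async def approve_then_remove(client) -> None:
    connection = models.PrivateEndpointConnection(
        private_link_service_connection_state=models.PrivateLinkServiceConnectionState(
            status="Approved",
            description="Approved by the workspace admin",
        )
    )
    # put() serializes the body as 'PrivateEndpointConnection' and returns the updated model.
    approved = await client.private_endpoint_connections.put(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        private_endpoint_connection_name="pe-connection-1",
        properties=connection,
    )
    print(approved.provisioning_state)

    # delete() returns None on 200/204 responses.
    await client.private_endpoint_connections.delete(
        "workspace-1234", "testworkspace", "pe-connection-1"
    )
```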
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py
new file mode 100644
index 00000000000..f76e651b755
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_private_link_resources_operations.py
@@ -0,0 +1,99 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateLinkResourcesOperations:
+ """PrivateLinkResourcesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def list_by_workspace(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.PrivateLinkResourceListResult":
+ """Gets the private link resources that need to be created for a workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateLinkResourceListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore
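A minimal sketch of driving the operation group above through the generated async client. The client class name `AzureMachineLearningWorkspaces`, the `private_link_resources` attribute, and the use of `azure.identity`'s `DefaultAzureCredential` are assumptions, not part of this diff.
```
# Hedged sketch (not generated code): construct the async client and call list_by_workspace.
import asyncio

from azure.identity.aio import DefaultAzureCredential
# Assumption: the vendored aio package exposes the generated client under this name.
from azext_machinelearningservices.vendored_sdks.machinelearningservices.aio import (
    AzureMachineLearningWorkspaces,
)


async def show_private_link_resources() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningWorkspaces(credential, "<subscription-id>")
    async with client, credential:
        # A single GET; the deserialized PrivateLinkResourceListResult is returned directly.
        result = await client.private_link_resources.list_by_workspace(
            resource_group_name="workspace-1234",
            workspace_name="testworkspace",
        )
        for resource in result.value or []:  # 'value' assumed from the list-result convention
            print(resource.name)


asyncio.run(show_private_link_resources())
```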
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py
new file mode 100644
index 00000000000..734b7af9491
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_quotas_operations.py
@@ -0,0 +1,176 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class QuotasOperations:
+ """QuotasOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def update(
+ self,
+ location: str,
+ parameters: "models.QuotaUpdateParameters",
+ **kwargs
+ ) -> "models.UpdateWorkspaceQuotasResult":
+ """Update quota for each VM family in workspace.
+
+        :param location: The location for which the quota update is requested.
+ :type location: str
+ :param parameters: Quota update parameters.
+ :type parameters: ~azure_machine_learning_workspaces.models.QuotaUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: UpdateWorkspaceQuotasResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.UpdateWorkspaceQuotasResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'QuotaUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore
+
+ def list(
+ self,
+ location: str,
+ **kwargs
+ ) -> AsyncIterable["models.ListWorkspaceQuotas"]:
+ """Gets the currently assigned Workspace Quotas based on VMFamily.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceQuotas"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas'} # type: ignore
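The two operations above differ in shape: `list` returns an `AsyncItemPaged` that is consumed with `async for`, while `update` posts a serialized `QuotaUpdateParameters` body. A hedged sketch follows; `client.quotas`, the `QuotaBaseProperties` model, and the result field names are assumptions.
```
# Hedged sketch (not generated code): read quotas page by page, then request an update.
from azext_machinelearningservices.vendored_sdks.machinelearningservices import models


async def inspect_and_update_quotas(client, location: str) -> None:
    # list() is not awaited itself; the AsyncItemPaged it returns is iterated asynchronously.
    async for quota in client.quotas.list(location):
        print(quota.id, quota.limit)

    # update() serializes the parameters object as the 'QuotaUpdateParameters' request body.
    parameters = models.QuotaUpdateParameters(
        value=[
            models.QuotaBaseProperties(  # model/field names assumed, not shown in this file
                id="<full-resource-id-of-the-vm-family-quota>",
                type="Microsoft.MachineLearningServices/workspaces/quotas",
                limit=100,
                unit="Count",
            )
        ]
    )
    result = await client.quotas.update(location, parameters)
    for item in result.value or []:
        print(item.status)
```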
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_storage_account_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_storage_account_operations.py
new file mode 100644
index 00000000000..e03fb941a55
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_storage_account_operations.py
@@ -0,0 +1,100 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class StorageAccountOperations:
+ """StorageAccountOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def list_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListStorageAccountKeysResult":
+        """Lists keys of the storage account associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListStorageAccountKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListStorageAccountKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListStorageAccountKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListStorageAccountKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys'} # type: ignore
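The `cls` keyword documented above is invoked as `cls(pipeline_response, deserialized, {})`, so it can surface transport-level details alongside the model. A hedged sketch, assuming `client.storage_account` is the operation-group attribute on an already-constructed async client:
```
# Hedged sketch (not generated code): capture the HTTP status code next to the result.
async def list_storage_keys_with_status(client) -> None:
    def keep_status(pipeline_response, deserialized, response_headers):
        # Mirrors the call shape in the generated code: (pipeline_response, deserialized, {}).
        return pipeline_response.http_response.status_code, deserialized

    status, keys_result = await client.storage_account.list_keys(
        resource_group_name="workspace-1234",
        workspace_name="testworkspace",
        cls=keep_status,
    )
    print(status, keys_result)
```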
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py
new file mode 100644
index 00000000000..39843bd8971
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_usages_operations.py
@@ -0,0 +1,113 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations:
+ """UsagesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location: str,
+ **kwargs
+ ) -> AsyncIterable["models.ListUsagesResult"]:
+        """Gets the current usage information as well as limits for AML resources for a given
+        subscription and location.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListUsagesResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListUsagesResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py
new file mode 100644
index 00000000000..456d8129ba1
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_virtual_machine_sizes_operations.py
@@ -0,0 +1,95 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class VirtualMachineSizesOperations:
+ """VirtualMachineSizesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def list(
+ self,
+ location: str,
+ **kwargs
+ ) -> "models.VirtualMachineSizeListResult":
+ """Returns supported VM Sizes in a location.
+
+        :param location: The location for which virtual machine sizes are queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: VirtualMachineSizeListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineSizeListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore
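Any non-200 response from the operation above is raised as `HttpResponseError` with ARM error formatting, so callers typically wrap the call. A hedged sketch; `client.virtual_machine_sizes` and the `value` attribute on the result are assumptions.
```
# Hedged sketch (not generated code): list VM sizes and handle the ARM-formatted error.
from azure.core.exceptions import HttpResponseError


async def print_vm_sizes(client, location: str) -> None:
    try:
        result = await client.virtual_machine_sizes.list(location)
    except HttpResponseError as exc:
        # Raised for any status code other than 200, with ARMErrorFormat details attached.
        print(f"listing VM sizes failed: {exc.message}")
        return
    for size in result.value or []:
        print(size.name)
```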
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py
new file mode 100644
index 00000000000..fa14649738e
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_connections_operations.py
@@ -0,0 +1,321 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceConnectionsOperations:
+ """WorkspaceConnectionsOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ target: Optional[str] = None,
+ category: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.PaginatedWorkspaceConnectionsList"]:
+        """Lists all connections under an AML workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedWorkspaceConnectionsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if target is not None:
+ query_parameters['target'] = self._serialize.query("target", target, 'str')
+ if category is not None:
+ query_parameters['category'] = self._serialize.query("category", category, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore
+
+ async def create(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ connection_name: str,
+ parameters: "models.WorkspaceConnectionDto",
+ **kwargs
+ ) -> "models.WorkspaceConnection":
+ """Add a new workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :param parameters: The object for creating or updating a new workspace connection.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceConnectionDto
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceConnectionDto')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ connection_name: str,
+ **kwargs
+ ) -> "models.WorkspaceConnection":
+ """Get the detail of a workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ async def delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ connection_name: str,
+ **kwargs
+ ) -> None:
+ """Delete a workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
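Taken together, the operations above form a small CRUD surface. The sketch below walks create, get, filtered list, and delete; `client.workspace_connections` and the `WorkspaceConnectionDto` field names are assumptions rather than facts from this file.
```
# Hedged sketch (not generated code): lifecycle of a single workspace connection.
from azext_machinelearningservices.vendored_sdks.machinelearningservices import models


async def connection_lifecycle(client) -> None:
    dto = models.WorkspaceConnectionDto(  # field names assumed
        name="connection-1",
        category="ACR",
        target="example.azurecr.io",
        auth_type="PAT",
        value="<secret>",
    )
    created = await client.workspace_connections.create(
        "workspace-1234", "testworkspace", "connection-1", dto
    )
    fetched = await client.workspace_connections.get(
        "workspace-1234", "testworkspace", "connection-1"
    )
    print(created.id, fetched.name)

    # Optional target/category arguments become query parameters on the list call.
    async for connection in client.workspace_connections.list(
        "workspace-1234", "testworkspace", category="ACR"
    ):
        print(connection.name)

    await client.workspace_connections.delete(
        "workspace-1234", "testworkspace", "connection-1"
    )
```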
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py
new file mode 100644
index 00000000000..e80ed1b839b
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_features_operations.py
@@ -0,0 +1,117 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceFeaturesOperations:
+ """WorkspaceFeaturesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncIterable["models.ListAmlUserFeatureResult"]:
+ """Lists all enabled features for a workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListAmlUserFeatureResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_operations.py
new file mode 100644
index 00000000000..e7cca8e4ea0
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspace_operations.py
@@ -0,0 +1,109 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceOperations:
+ """WorkspaceOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_skus(
+ self,
+ **kwargs
+ ) -> AsyncIterable["models.SkuListResult"]:
+        """Lists all SKUs with associated features.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either SkuListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.SkuListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SkuListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_skus.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('SkuListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py
new file mode 100644
index 00000000000..5d5e7ab92bf
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations/_workspaces_operations.py
@@ -0,0 +1,786 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
+import warnings
+
+from azure.core.async_paging import AsyncItemPaged, AsyncList
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspacesOperations:
+ """WorkspacesOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def get(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.Workspace":
+ """Gets the properties of the specified machine learning workspace.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
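+
+    # Usage sketch (not part of the generated code): assuming an authenticated async
+    # AzureMachineLearningWorkspaces client named `client` that exposes this operation
+    # group as `workspaces`, a workspace could be fetched like this:
+    #
+    #     workspace = await client.workspaces.get(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #     )
+    #     print(workspace.name)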
+
+ async def _create_or_update_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.Workspace",
+ **kwargs
+ ) -> Optional["models.Workspace"]:
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'Workspace')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def begin_create_or_update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.Workspace",
+ **kwargs
+ ) -> AsyncLROPoller["models.Workspace"]:
+ """Creates or updates a workspace with the specified parameters.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for creating or updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.Workspace
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for AsyncARMPolling (the default), False for no polling, or a
+         polling object for a custom polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.Workspace]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
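+
+    # Usage sketch (hedged, not part of the generated code): assuming the same `client`
+    # and a populated `models.Workspace` instance `workspace_params`, the long-running
+    # create/update can be awaited via the returned poller:
+    #
+    #     poller = await client.workspaces.begin_create_or_update(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #         parameters=workspace_params,
+    #     )
+    #     workspace = await poller.result()  # blocks until the LRO completes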
+
+ async def _delete_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ async def begin_delete(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+ """Deletes a machine learning workspace.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for AsyncARMPolling (the default), False for no polling, or a
+         polling object for a custom polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
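+
+    # Usage sketch (hedged): deleting a workspace and waiting for the LRO to finish,
+    # assuming the same authenticated `client`:
+    #
+    #     poller = await client.workspaces.begin_delete(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #     )
+    #     await poller.result()  # returns None once the delete has completed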
+
+ async def update(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ parameters: "models.WorkspaceUpdateParameters",
+ **kwargs
+ ) -> "models.Workspace":
+ """Updates a machine learning workspace with the specified parameters.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
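+
+    # Usage sketch (hedged): a PATCH-style update, assuming `description` is one of the
+    # optional keyword arguments accepted by models.WorkspaceUpdateParameters:
+    #
+    #     updated = await client.workspaces.update(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #         parameters=models.WorkspaceUpdateParameters(description="new description"),
+    #     )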
+
+ def list_by_resource_group(
+ self,
+ resource_group_name: str,
+ skip: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.WorkspaceListResult"]:
+ """Lists all the available machine learning workspaces under the specified resource group.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_resource_group.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
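+
+    # Usage sketch (hedged): the method returns an AsyncItemPaged without needing to be
+    # awaited itself; pages are fetched lazily while iterating:
+    #
+    #     async for ws in client.workspaces.list_by_resource_group("workspace-1234"):
+    #         print(ws.name)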
+
+ async def list_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.ListWorkspaceKeysResult":
+        """Lists all the keys associated with this workspace. This includes keys for the storage
+        account, Application Insights, and the container registry password.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListWorkspaceKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore
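+
+    # Usage sketch (hedged): retrieving the workspace keys, assuming the same `client`;
+    # the result is a models.ListWorkspaceKeysResult:
+    #
+    #     keys = await client.workspaces.list_keys(
+    #         resource_group_name="testrg123",
+    #         workspace_name="workspaces123",
+    #     )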
+
+ async def _resync_keys_initial(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> None:
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._resync_keys_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _resync_keys_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
+
+ async def begin_resync_keys(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> AsyncLROPoller[None]:
+        """Resyncs all the keys associated with this workspace. This includes keys for the storage
+        account, Application Insights, and the container registry password.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: True for AsyncARMPolling (the default), False for no polling, or a
+         polling object for a custom polling strategy.
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.AsyncLROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = await self._resync_keys_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = AsyncNoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return AsyncLROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
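+
+    # Usage sketch (hedged): resyncing the workspace keys via the LRO poller:
+    #
+    #     poller = await client.workspaces.begin_resync_keys(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #     )
+    #     await poller.result()  # returns None once the keys have been resynced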
+
+ def list_by_subscription(
+ self,
+ skip: Optional[str] = None,
+ **kwargs
+ ) -> AsyncIterable["models.WorkspaceListResult"]:
+ """Lists all the available machine learning workspaces under the specified subscription.
+
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: An iterator-like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_subscription.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ async def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, AsyncList(list_of_elem)
+
+ async def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return AsyncItemPaged(
+ get_next, extract_data
+ )
+ list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
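+
+    # Usage sketch (hedged): enumerating every workspace in the subscription; like the
+    # resource-group listing, this returns an AsyncItemPaged that is iterated directly:
+    #
+    #     async for ws in client.workspaces.list_by_subscription():
+    #         print(ws.id)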
+
+ async def list_notebook_access_token(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ **kwargs
+ ) -> "models.NotebookAccessTokenResult":
+        """Returns the notebook access token and refresh token.
+
+        :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: NotebookAccessTokenResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.NotebookAccessTokenResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookAccessTokenResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_notebook_access_token.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('NotebookAccessTokenResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_notebook_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken'} # type: ignore
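+
+    # Usage sketch (hedged): fetching the notebook tokens, assuming the same `client`;
+    # the returned models.NotebookAccessTokenResult carries the access and refresh tokens:
+    #
+    #     token_result = await client.workspaces.list_notebook_access_token(
+    #         resource_group_name="workspace-1234",
+    #         workspace_name="testworkspace",
+    #     )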
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py
new file mode 100644
index 00000000000..c6171e68ba5
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py
@@ -0,0 +1,628 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+try:
+ from ._models_py3 import AciServiceCreateRequest
+ from ._models_py3 import AciServiceCreateRequestDataCollection
+ from ._models_py3 import AciServiceCreateRequestEncryptionProperties
+ from ._models_py3 import AciServiceCreateRequestVnetConfiguration
+ from ._models_py3 import AciServiceResponse
+ from ._models_py3 import AciServiceResponseDataCollection
+ from ._models_py3 import AciServiceResponseEncryptionProperties
+ from ._models_py3 import AciServiceResponseEnvironmentImageRequest
+ from ._models_py3 import AciServiceResponseVnetConfiguration
+ from ._models_py3 import Aks
+ from ._models_py3 import AksComputeSecrets
+ from ._models_py3 import AksNetworkingConfiguration
+ from ._models_py3 import AksProperties
+ from ._models_py3 import AksReplicaStatus
+ from ._models_py3 import AksReplicaStatusError
+ from ._models_py3 import AksServiceCreateRequest
+ from ._models_py3 import AksServiceCreateRequestAutoScaler
+ from ._models_py3 import AksServiceCreateRequestDataCollection
+ from ._models_py3 import AksServiceCreateRequestLivenessProbeRequirements
+ from ._models_py3 import AksServiceResponse
+ from ._models_py3 import AksServiceResponseAutoScaler
+ from ._models_py3 import AksServiceResponseDataCollection
+ from ._models_py3 import AksServiceResponseDeploymentStatus
+ from ._models_py3 import AksServiceResponseEnvironmentImageRequest
+ from ._models_py3 import AksServiceResponseLivenessProbeRequirements
+ from ._models_py3 import AksVariantResponse
+ from ._models_py3 import AmlCompute
+ from ._models_py3 import AmlComputeNodeInformation
+ from ._models_py3 import AmlComputeNodesInformation
+ from ._models_py3 import AmlComputeProperties
+ from ._models_py3 import AmlUserFeature
+ from ._models_py3 import AssignedUser
+ from ._models_py3 import AuthKeys
+ from ._models_py3 import AutoPauseProperties
+ from ._models_py3 import AutoScaleProperties
+ from ._models_py3 import AutoScaler
+ from ._models_py3 import ClusterUpdateParameters
+ from ._models_py3 import Compute
+ from ._models_py3 import ComputeInstance
+ from ._models_py3 import ComputeInstanceApplication
+ from ._models_py3 import ComputeInstanceConnectivityEndpoints
+ from ._models_py3 import ComputeInstanceCreatedBy
+ from ._models_py3 import ComputeInstanceLastOperation
+ from ._models_py3 import ComputeInstanceProperties
+ from ._models_py3 import ComputeInstanceSshSettings
+ from ._models_py3 import ComputeNodesInformation
+ from ._models_py3 import ComputeResource
+ from ._models_py3 import ComputeSecrets
+ from ._models_py3 import ContainerRegistry
+ from ._models_py3 import ContainerRegistryResponse
+ from ._models_py3 import ContainerResourceRequirements
+ from ._models_py3 import CosmosDbSettings
+ from ._models_py3 import CreateEndpointVariantRequest
+ from ._models_py3 import CreateServiceRequest
+ from ._models_py3 import CreateServiceRequestEnvironmentImageRequest
+ from ._models_py3 import CreateServiceRequestKeys
+ from ._models_py3 import DataFactory
+ from ._models_py3 import DataLakeAnalytics
+ from ._models_py3 import DataLakeAnalyticsProperties
+ from ._models_py3 import Databricks
+ from ._models_py3 import DatabricksComputeSecrets
+ from ._models_py3 import DatabricksProperties
+ from ._models_py3 import DatasetReference
+ from ._models_py3 import EncryptionProperties
+ from ._models_py3 import EncryptionProperty
+ from ._models_py3 import EnvironmentImageRequest
+ from ._models_py3 import EnvironmentImageRequestEnvironment
+ from ._models_py3 import EnvironmentImageRequestEnvironmentReference
+ from ._models_py3 import EnvironmentImageResponse
+ from ._models_py3 import EnvironmentImageResponseEnvironment
+ from ._models_py3 import EnvironmentImageResponseEnvironmentReference
+ from ._models_py3 import EnvironmentReference
+ from ._models_py3 import ErrorDetail
+ from ._models_py3 import ErrorResponse
+ from ._models_py3 import EstimatedVmPrice
+ from ._models_py3 import EstimatedVmPrices
+ from ._models_py3 import HdInsight
+ from ._models_py3 import HdInsightProperties
+ from ._models_py3 import Identity
+ from ._models_py3 import IdentityForCmk
+ from ._models_py3 import ImageAsset
+ from ._models_py3 import KeyVaultProperties
+ from ._models_py3 import ListAmlUserFeatureResult
+ from ._models_py3 import ListNotebookKeysResult
+ from ._models_py3 import ListStorageAccountKeysResult
+ from ._models_py3 import ListUsagesResult
+ from ._models_py3 import ListWorkspaceKeysResult
+ from ._models_py3 import ListWorkspaceQuotas
+ from ._models_py3 import LivenessProbeRequirements
+ from ._models_py3 import MachineLearningServiceError
+ from ._models_py3 import Model
+ from ._models_py3 import ModelDataCollection
+ from ._models_py3 import ModelDockerSection
+ from ._models_py3 import ModelDockerSectionBaseImageRegistry
+ from ._models_py3 import ModelDockerSectionResponse
+ from ._models_py3 import ModelDockerSectionResponseBaseImageRegistry
+ from ._models_py3 import ModelEnvironmentDefinition
+ from ._models_py3 import ModelEnvironmentDefinitionDocker
+ from ._models_py3 import ModelEnvironmentDefinitionPython
+ from ._models_py3 import ModelEnvironmentDefinitionR
+ from ._models_py3 import ModelEnvironmentDefinitionResponse
+ from ._models_py3 import ModelEnvironmentDefinitionResponseDocker
+ from ._models_py3 import ModelEnvironmentDefinitionResponsePython
+ from ._models_py3 import ModelEnvironmentDefinitionResponseR
+ from ._models_py3 import ModelEnvironmentDefinitionResponseSpark
+ from ._models_py3 import ModelEnvironmentDefinitionSpark
+ from ._models_py3 import ModelPythonSection
+ from ._models_py3 import ModelSparkSection
+ from ._models_py3 import NodeStateCounts
+ from ._models_py3 import NotebookAccessTokenResult
+ from ._models_py3 import NotebookPreparationError
+ from ._models_py3 import NotebookResourceInfo
+ from ._models_py3 import Operation
+ from ._models_py3 import OperationDisplay
+ from ._models_py3 import OperationListResult
+ from ._models_py3 import PaginatedComputeResourcesList
+ from ._models_py3 import PaginatedServiceList
+ from ._models_py3 import PaginatedWorkspaceConnectionsList
+ from ._models_py3 import Password
+ from ._models_py3 import PersonalComputeInstanceSettings
+ from ._models_py3 import PrivateEndpoint
+ from ._models_py3 import PrivateEndpointConnection
+ from ._models_py3 import PrivateLinkResource
+ from ._models_py3 import PrivateLinkResourceListResult
+ from ._models_py3 import PrivateLinkServiceConnectionState
+ from ._models_py3 import QuotaBaseProperties
+ from ._models_py3 import QuotaUpdateParameters
+ from ._models_py3 import RCranPackage
+ from ._models_py3 import RGitHubPackage
+ from ._models_py3 import RGitHubPackageResponse
+ from ._models_py3 import RSection
+ from ._models_py3 import RSectionResponse
+ from ._models_py3 import RegistryListCredentialsResult
+ from ._models_py3 import Resource
+ from ._models_py3 import ResourceId
+ from ._models_py3 import ResourceName
+ from ._models_py3 import ResourceQuota
+ from ._models_py3 import ResourceSkuLocationInfo
+ from ._models_py3 import ResourceSkuZoneDetails
+ from ._models_py3 import Restriction
+ from ._models_py3 import ScaleSettings
+ from ._models_py3 import ScriptReference
+ from ._models_py3 import ScriptsToExecute
+ from ._models_py3 import ServiceManagedResourcesSettings
+ from ._models_py3 import ServicePrincipalCredentials
+ from ._models_py3 import ServiceResource
+ from ._models_py3 import ServiceResponseBase
+ from ._models_py3 import ServiceResponseBaseError
+ from ._models_py3 import SetupScripts
+ from ._models_py3 import SharedPrivateLinkResource
+ from ._models_py3 import Sku
+ from ._models_py3 import SkuCapability
+ from ._models_py3 import SkuListResult
+ from ._models_py3 import SparkMavenPackage
+ from ._models_py3 import SslConfiguration
+ from ._models_py3 import SynapseSpark
+ from ._models_py3 import SynapseSparkPoolProperties
+ from ._models_py3 import SynapseSparkPoolPropertiesautogenerated
+ from ._models_py3 import SystemData
+ from ._models_py3 import SystemService
+ from ._models_py3 import UpdateWorkspaceQuotas
+ from ._models_py3 import UpdateWorkspaceQuotasResult
+ from ._models_py3 import Usage
+ from ._models_py3 import UsageName
+ from ._models_py3 import UserAccountCredentials
+ from ._models_py3 import UserAssignedIdentity
+ from ._models_py3 import VirtualMachine
+ from ._models_py3 import VirtualMachineImage
+ from ._models_py3 import VirtualMachineProperties
+ from ._models_py3 import VirtualMachineSecrets
+ from ._models_py3 import VirtualMachineSize
+ from ._models_py3 import VirtualMachineSizeListResult
+ from ._models_py3 import VirtualMachineSshCredentials
+ from ._models_py3 import VnetConfiguration
+ from ._models_py3 import Workspace
+ from ._models_py3 import WorkspaceConnection
+ from ._models_py3 import WorkspaceConnectionDto
+ from ._models_py3 import WorkspaceListResult
+ from ._models_py3 import WorkspaceSku
+ from ._models_py3 import WorkspaceUpdateParameters
+except (SyntaxError, ImportError):
+ from ._models import AciServiceCreateRequest # type: ignore
+ from ._models import AciServiceCreateRequestDataCollection # type: ignore
+ from ._models import AciServiceCreateRequestEncryptionProperties # type: ignore
+ from ._models import AciServiceCreateRequestVnetConfiguration # type: ignore
+ from ._models import AciServiceResponse # type: ignore
+ from ._models import AciServiceResponseDataCollection # type: ignore
+ from ._models import AciServiceResponseEncryptionProperties # type: ignore
+ from ._models import AciServiceResponseEnvironmentImageRequest # type: ignore
+ from ._models import AciServiceResponseVnetConfiguration # type: ignore
+ from ._models import Aks # type: ignore
+ from ._models import AksComputeSecrets # type: ignore
+ from ._models import AksNetworkingConfiguration # type: ignore
+ from ._models import AksProperties # type: ignore
+ from ._models import AksReplicaStatus # type: ignore
+ from ._models import AksReplicaStatusError # type: ignore
+ from ._models import AksServiceCreateRequest # type: ignore
+ from ._models import AksServiceCreateRequestAutoScaler # type: ignore
+ from ._models import AksServiceCreateRequestDataCollection # type: ignore
+ from ._models import AksServiceCreateRequestLivenessProbeRequirements # type: ignore
+ from ._models import AksServiceResponse # type: ignore
+ from ._models import AksServiceResponseAutoScaler # type: ignore
+ from ._models import AksServiceResponseDataCollection # type: ignore
+ from ._models import AksServiceResponseDeploymentStatus # type: ignore
+ from ._models import AksServiceResponseEnvironmentImageRequest # type: ignore
+ from ._models import AksServiceResponseLivenessProbeRequirements # type: ignore
+ from ._models import AksVariantResponse # type: ignore
+ from ._models import AmlCompute # type: ignore
+ from ._models import AmlComputeNodeInformation # type: ignore
+ from ._models import AmlComputeNodesInformation # type: ignore
+ from ._models import AmlComputeProperties # type: ignore
+ from ._models import AmlUserFeature # type: ignore
+ from ._models import AssignedUser # type: ignore
+ from ._models import AuthKeys # type: ignore
+ from ._models import AutoPauseProperties # type: ignore
+ from ._models import AutoScaleProperties # type: ignore
+ from ._models import AutoScaler # type: ignore
+ from ._models import ClusterUpdateParameters # type: ignore
+ from ._models import Compute # type: ignore
+ from ._models import ComputeInstance # type: ignore
+ from ._models import ComputeInstanceApplication # type: ignore
+ from ._models import ComputeInstanceConnectivityEndpoints # type: ignore
+ from ._models import ComputeInstanceCreatedBy # type: ignore
+ from ._models import ComputeInstanceLastOperation # type: ignore
+ from ._models import ComputeInstanceProperties # type: ignore
+ from ._models import ComputeInstanceSshSettings # type: ignore
+ from ._models import ComputeNodesInformation # type: ignore
+ from ._models import ComputeResource # type: ignore
+ from ._models import ComputeSecrets # type: ignore
+ from ._models import ContainerRegistry # type: ignore
+ from ._models import ContainerRegistryResponse # type: ignore
+ from ._models import ContainerResourceRequirements # type: ignore
+ from ._models import CosmosDbSettings # type: ignore
+ from ._models import CreateEndpointVariantRequest # type: ignore
+ from ._models import CreateServiceRequest # type: ignore
+ from ._models import CreateServiceRequestEnvironmentImageRequest # type: ignore
+ from ._models import CreateServiceRequestKeys # type: ignore
+ from ._models import DataFactory # type: ignore
+ from ._models import DataLakeAnalytics # type: ignore
+ from ._models import DataLakeAnalyticsProperties # type: ignore
+ from ._models import Databricks # type: ignore
+ from ._models import DatabricksComputeSecrets # type: ignore
+ from ._models import DatabricksProperties # type: ignore
+ from ._models import DatasetReference # type: ignore
+ from ._models import EncryptionProperties # type: ignore
+ from ._models import EncryptionProperty # type: ignore
+ from ._models import EnvironmentImageRequest # type: ignore
+ from ._models import EnvironmentImageRequestEnvironment # type: ignore
+ from ._models import EnvironmentImageRequestEnvironmentReference # type: ignore
+ from ._models import EnvironmentImageResponse # type: ignore
+ from ._models import EnvironmentImageResponseEnvironment # type: ignore
+ from ._models import EnvironmentImageResponseEnvironmentReference # type: ignore
+ from ._models import EnvironmentReference # type: ignore
+ from ._models import ErrorDetail # type: ignore
+ from ._models import ErrorResponse # type: ignore
+ from ._models import EstimatedVmPrice # type: ignore
+ from ._models import EstimatedVmPrices # type: ignore
+ from ._models import HdInsight # type: ignore
+ from ._models import HdInsightProperties # type: ignore
+ from ._models import Identity # type: ignore
+ from ._models import IdentityForCmk # type: ignore
+ from ._models import ImageAsset # type: ignore
+ from ._models import KeyVaultProperties # type: ignore
+ from ._models import ListAmlUserFeatureResult # type: ignore
+ from ._models import ListNotebookKeysResult # type: ignore
+ from ._models import ListStorageAccountKeysResult # type: ignore
+ from ._models import ListUsagesResult # type: ignore
+ from ._models import ListWorkspaceKeysResult # type: ignore
+ from ._models import ListWorkspaceQuotas # type: ignore
+ from ._models import LivenessProbeRequirements # type: ignore
+ from ._models import MachineLearningServiceError # type: ignore
+ from ._models import Model # type: ignore
+ from ._models import ModelDataCollection # type: ignore
+ from ._models import ModelDockerSection # type: ignore
+ from ._models import ModelDockerSectionBaseImageRegistry # type: ignore
+ from ._models import ModelDockerSectionResponse # type: ignore
+ from ._models import ModelDockerSectionResponseBaseImageRegistry # type: ignore
+ from ._models import ModelEnvironmentDefinition # type: ignore
+ from ._models import ModelEnvironmentDefinitionDocker # type: ignore
+ from ._models import ModelEnvironmentDefinitionPython # type: ignore
+ from ._models import ModelEnvironmentDefinitionR # type: ignore
+ from ._models import ModelEnvironmentDefinitionResponse # type: ignore
+ from ._models import ModelEnvironmentDefinitionResponseDocker # type: ignore
+ from ._models import ModelEnvironmentDefinitionResponsePython # type: ignore
+ from ._models import ModelEnvironmentDefinitionResponseR # type: ignore
+ from ._models import ModelEnvironmentDefinitionResponseSpark # type: ignore
+ from ._models import ModelEnvironmentDefinitionSpark # type: ignore
+ from ._models import ModelPythonSection # type: ignore
+ from ._models import ModelSparkSection # type: ignore
+ from ._models import NodeStateCounts # type: ignore
+ from ._models import NotebookAccessTokenResult # type: ignore
+ from ._models import NotebookPreparationError # type: ignore
+ from ._models import NotebookResourceInfo # type: ignore
+ from ._models import Operation # type: ignore
+ from ._models import OperationDisplay # type: ignore
+ from ._models import OperationListResult # type: ignore
+ from ._models import PaginatedComputeResourcesList # type: ignore
+ from ._models import PaginatedServiceList # type: ignore
+ from ._models import PaginatedWorkspaceConnectionsList # type: ignore
+ from ._models import Password # type: ignore
+ from ._models import PersonalComputeInstanceSettings # type: ignore
+ from ._models import PrivateEndpoint # type: ignore
+ from ._models import PrivateEndpointConnection # type: ignore
+ from ._models import PrivateLinkResource # type: ignore
+ from ._models import PrivateLinkResourceListResult # type: ignore
+ from ._models import PrivateLinkServiceConnectionState # type: ignore
+ from ._models import QuotaBaseProperties # type: ignore
+ from ._models import QuotaUpdateParameters # type: ignore
+ from ._models import RCranPackage # type: ignore
+ from ._models import RGitHubPackage # type: ignore
+ from ._models import RGitHubPackageResponse # type: ignore
+ from ._models import RSection # type: ignore
+ from ._models import RSectionResponse # type: ignore
+ from ._models import RegistryListCredentialsResult # type: ignore
+ from ._models import Resource # type: ignore
+ from ._models import ResourceId # type: ignore
+ from ._models import ResourceName # type: ignore
+ from ._models import ResourceQuota # type: ignore
+ from ._models import ResourceSkuLocationInfo # type: ignore
+ from ._models import ResourceSkuZoneDetails # type: ignore
+ from ._models import Restriction # type: ignore
+ from ._models import ScaleSettings # type: ignore
+ from ._models import ScriptReference # type: ignore
+ from ._models import ScriptsToExecute # type: ignore
+ from ._models import ServiceManagedResourcesSettings # type: ignore
+ from ._models import ServicePrincipalCredentials # type: ignore
+ from ._models import ServiceResource # type: ignore
+ from ._models import ServiceResponseBase # type: ignore
+ from ._models import ServiceResponseBaseError # type: ignore
+ from ._models import SetupScripts # type: ignore
+ from ._models import SharedPrivateLinkResource # type: ignore
+ from ._models import Sku # type: ignore
+ from ._models import SkuCapability # type: ignore
+ from ._models import SkuListResult # type: ignore
+ from ._models import SparkMavenPackage # type: ignore
+ from ._models import SslConfiguration # type: ignore
+ from ._models import SynapseSpark # type: ignore
+ from ._models import SynapseSparkPoolProperties # type: ignore
+ from ._models import SynapseSparkPoolPropertiesautogenerated # type: ignore
+ from ._models import SystemData # type: ignore
+ from ._models import SystemService # type: ignore
+ from ._models import UpdateWorkspaceQuotas # type: ignore
+ from ._models import UpdateWorkspaceQuotasResult # type: ignore
+ from ._models import Usage # type: ignore
+ from ._models import UsageName # type: ignore
+ from ._models import UserAccountCredentials # type: ignore
+ from ._models import UserAssignedIdentity # type: ignore
+ from ._models import VirtualMachine # type: ignore
+ from ._models import VirtualMachineImage # type: ignore
+ from ._models import VirtualMachineProperties # type: ignore
+ from ._models import VirtualMachineSecrets # type: ignore
+ from ._models import VirtualMachineSize # type: ignore
+ from ._models import VirtualMachineSizeListResult # type: ignore
+ from ._models import VirtualMachineSshCredentials # type: ignore
+ from ._models import VnetConfiguration # type: ignore
+ from ._models import Workspace # type: ignore
+ from ._models import WorkspaceConnection # type: ignore
+ from ._models import WorkspaceConnectionDto # type: ignore
+ from ._models import WorkspaceListResult # type: ignore
+ from ._models import WorkspaceSku # type: ignore
+ from ._models import WorkspaceUpdateParameters # type: ignore
+
+from ._azure_machine_learning_workspaces_enums import (
+ AllocationState,
+ ApplicationSharingPolicy,
+ BillingCurrency,
+ ClusterPurpose,
+ ComputeEnvironmentType,
+ ComputeInstanceAuthorizationType,
+ ComputeInstanceState,
+ ComputeType,
+ DeploymentType,
+ EncryptionStatus,
+ IdentityType,
+ LoadBalancerType,
+ NodeState,
+ OperationName,
+ OperationStatus,
+ OrderString,
+ OsType,
+ PrivateEndpointConnectionProvisioningState,
+ PrivateEndpointServiceConnectionStatus,
+ ProvisioningState,
+ QuotaUnit,
+ ReasonCode,
+ RemoteLoginPortPublicAccess,
+ ResourceIdentityType,
+ SshPublicAccess,
+ SslConfigurationStatus,
+ Status,
+ UnderlyingResourceAction,
+ UnitOfMeasure,
+ UsageUnit,
+ ValueFormat,
+ VariantType,
+ VmPriceOsType,
+ VmPriority,
+ VmTier,
+ WebServiceState,
+)
+
+__all__ = [
+ 'AciServiceCreateRequest',
+ 'AciServiceCreateRequestDataCollection',
+ 'AciServiceCreateRequestEncryptionProperties',
+ 'AciServiceCreateRequestVnetConfiguration',
+ 'AciServiceResponse',
+ 'AciServiceResponseDataCollection',
+ 'AciServiceResponseEncryptionProperties',
+ 'AciServiceResponseEnvironmentImageRequest',
+ 'AciServiceResponseVnetConfiguration',
+ 'Aks',
+ 'AksComputeSecrets',
+ 'AksNetworkingConfiguration',
+ 'AksProperties',
+ 'AksReplicaStatus',
+ 'AksReplicaStatusError',
+ 'AksServiceCreateRequest',
+ 'AksServiceCreateRequestAutoScaler',
+ 'AksServiceCreateRequestDataCollection',
+ 'AksServiceCreateRequestLivenessProbeRequirements',
+ 'AksServiceResponse',
+ 'AksServiceResponseAutoScaler',
+ 'AksServiceResponseDataCollection',
+ 'AksServiceResponseDeploymentStatus',
+ 'AksServiceResponseEnvironmentImageRequest',
+ 'AksServiceResponseLivenessProbeRequirements',
+ 'AksVariantResponse',
+ 'AmlCompute',
+ 'AmlComputeNodeInformation',
+ 'AmlComputeNodesInformation',
+ 'AmlComputeProperties',
+ 'AmlUserFeature',
+ 'AssignedUser',
+ 'AuthKeys',
+ 'AutoPauseProperties',
+ 'AutoScaleProperties',
+ 'AutoScaler',
+ 'ClusterUpdateParameters',
+ 'Compute',
+ 'ComputeInstance',
+ 'ComputeInstanceApplication',
+ 'ComputeInstanceConnectivityEndpoints',
+ 'ComputeInstanceCreatedBy',
+ 'ComputeInstanceLastOperation',
+ 'ComputeInstanceProperties',
+ 'ComputeInstanceSshSettings',
+ 'ComputeNodesInformation',
+ 'ComputeResource',
+ 'ComputeSecrets',
+ 'ContainerRegistry',
+ 'ContainerRegistryResponse',
+ 'ContainerResourceRequirements',
+ 'CosmosDbSettings',
+ 'CreateEndpointVariantRequest',
+ 'CreateServiceRequest',
+ 'CreateServiceRequestEnvironmentImageRequest',
+ 'CreateServiceRequestKeys',
+ 'DataFactory',
+ 'DataLakeAnalytics',
+ 'DataLakeAnalyticsProperties',
+ 'Databricks',
+ 'DatabricksComputeSecrets',
+ 'DatabricksProperties',
+ 'DatasetReference',
+ 'EncryptionProperties',
+ 'EncryptionProperty',
+ 'EnvironmentImageRequest',
+ 'EnvironmentImageRequestEnvironment',
+ 'EnvironmentImageRequestEnvironmentReference',
+ 'EnvironmentImageResponse',
+ 'EnvironmentImageResponseEnvironment',
+ 'EnvironmentImageResponseEnvironmentReference',
+ 'EnvironmentReference',
+ 'ErrorDetail',
+ 'ErrorResponse',
+ 'EstimatedVmPrice',
+ 'EstimatedVmPrices',
+ 'HdInsight',
+ 'HdInsightProperties',
+ 'Identity',
+ 'IdentityForCmk',
+ 'ImageAsset',
+ 'KeyVaultProperties',
+ 'ListAmlUserFeatureResult',
+ 'ListNotebookKeysResult',
+ 'ListStorageAccountKeysResult',
+ 'ListUsagesResult',
+ 'ListWorkspaceKeysResult',
+ 'ListWorkspaceQuotas',
+ 'LivenessProbeRequirements',
+ 'MachineLearningServiceError',
+ 'Model',
+ 'ModelDataCollection',
+ 'ModelDockerSection',
+ 'ModelDockerSectionBaseImageRegistry',
+ 'ModelDockerSectionResponse',
+ 'ModelDockerSectionResponseBaseImageRegistry',
+ 'ModelEnvironmentDefinition',
+ 'ModelEnvironmentDefinitionDocker',
+ 'ModelEnvironmentDefinitionPython',
+ 'ModelEnvironmentDefinitionR',
+ 'ModelEnvironmentDefinitionResponse',
+ 'ModelEnvironmentDefinitionResponseDocker',
+ 'ModelEnvironmentDefinitionResponsePython',
+ 'ModelEnvironmentDefinitionResponseR',
+ 'ModelEnvironmentDefinitionResponseSpark',
+ 'ModelEnvironmentDefinitionSpark',
+ 'ModelPythonSection',
+ 'ModelSparkSection',
+ 'NodeStateCounts',
+ 'NotebookAccessTokenResult',
+ 'NotebookPreparationError',
+ 'NotebookResourceInfo',
+ 'Operation',
+ 'OperationDisplay',
+ 'OperationListResult',
+ 'PaginatedComputeResourcesList',
+ 'PaginatedServiceList',
+ 'PaginatedWorkspaceConnectionsList',
+ 'Password',
+ 'PersonalComputeInstanceSettings',
+ 'PrivateEndpoint',
+ 'PrivateEndpointConnection',
+ 'PrivateLinkResource',
+ 'PrivateLinkResourceListResult',
+ 'PrivateLinkServiceConnectionState',
+ 'QuotaBaseProperties',
+ 'QuotaUpdateParameters',
+ 'RCranPackage',
+ 'RGitHubPackage',
+ 'RGitHubPackageResponse',
+ 'RSection',
+ 'RSectionResponse',
+ 'RegistryListCredentialsResult',
+ 'Resource',
+ 'ResourceId',
+ 'ResourceName',
+ 'ResourceQuota',
+ 'ResourceSkuLocationInfo',
+ 'ResourceSkuZoneDetails',
+ 'Restriction',
+ 'ScaleSettings',
+ 'ScriptReference',
+ 'ScriptsToExecute',
+ 'ServiceManagedResourcesSettings',
+ 'ServicePrincipalCredentials',
+ 'ServiceResource',
+ 'ServiceResponseBase',
+ 'ServiceResponseBaseError',
+ 'SetupScripts',
+ 'SharedPrivateLinkResource',
+ 'Sku',
+ 'SkuCapability',
+ 'SkuListResult',
+ 'SparkMavenPackage',
+ 'SslConfiguration',
+ 'SynapseSpark',
+ 'SynapseSparkPoolProperties',
+ 'SynapseSparkPoolPropertiesautogenerated',
+ 'SystemData',
+ 'SystemService',
+ 'UpdateWorkspaceQuotas',
+ 'UpdateWorkspaceQuotasResult',
+ 'Usage',
+ 'UsageName',
+ 'UserAccountCredentials',
+ 'UserAssignedIdentity',
+ 'VirtualMachine',
+ 'VirtualMachineImage',
+ 'VirtualMachineProperties',
+ 'VirtualMachineSecrets',
+ 'VirtualMachineSize',
+ 'VirtualMachineSizeListResult',
+ 'VirtualMachineSshCredentials',
+ 'VnetConfiguration',
+ 'Workspace',
+ 'WorkspaceConnection',
+ 'WorkspaceConnectionDto',
+ 'WorkspaceListResult',
+ 'WorkspaceSku',
+ 'WorkspaceUpdateParameters',
+ 'AllocationState',
+ 'ApplicationSharingPolicy',
+ 'BillingCurrency',
+ 'ClusterPurpose',
+ 'ComputeEnvironmentType',
+ 'ComputeInstanceAuthorizationType',
+ 'ComputeInstanceState',
+ 'ComputeType',
+ 'DeploymentType',
+ 'EncryptionStatus',
+ 'IdentityType',
+ 'LoadBalancerType',
+ 'NodeState',
+ 'OperationName',
+ 'OperationStatus',
+ 'OrderString',
+ 'OsType',
+ 'PrivateEndpointConnectionProvisioningState',
+ 'PrivateEndpointServiceConnectionStatus',
+ 'ProvisioningState',
+ 'QuotaUnit',
+ 'ReasonCode',
+ 'RemoteLoginPortPublicAccess',
+ 'ResourceIdentityType',
+ 'SshPublicAccess',
+ 'SslConfigurationStatus',
+ 'Status',
+ 'UnderlyingResourceAction',
+ 'UnitOfMeasure',
+ 'UsageUnit',
+ 'ValueFormat',
+ 'VariantType',
+ 'VmPriceOsType',
+ 'VmPriority',
+ 'VmTier',
+ 'WebServiceState',
+]
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py
new file mode 100644
index 00000000000..0e04c9bd335
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py
@@ -0,0 +1,349 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum, EnumMeta
+from six import with_metaclass
+
+class _CaseInsensitiveEnumMeta(EnumMeta):
+ def __getitem__(self, name):
+ return super().__getitem__(name.upper())
+
+ def __getattr__(cls, name):
+        """Return the enum member matching `name`.
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+ """
+ try:
+ return cls._member_map_[name.upper()]
+ except KeyError:
+ raise AttributeError(name)
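+
+# A small usage sketch for the metaclass above (using AllocationState, defined
+# below, purely as an illustration): member lookup ignores case, so
+# AllocationState['steady'], AllocationState['Steady'] and AllocationState.STEADY
+# all resolve to the same member, and because each enum also derives from str,
+# comparisons against raw service payload values such as
+# AllocationState.STEADY == "Steady" evaluate to True.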
+
+
+class AllocationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Allocation state of the compute. Possible values are: steady - Indicates that the compute is
+ not resizing. There are no changes to the number of compute nodes in the compute in progress. A
+ compute enters this state when it is created and when no operations are being performed on the
+ compute to change the number of compute nodes. resizing - Indicates that the compute is
+ resizing; that is, compute nodes are being added to or removed from the compute.
+ """
+
+ STEADY = "Steady"
+ RESIZING = "Resizing"
+
+class ApplicationSharingPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Policy for sharing applications on this compute instance among users of the parent
+    workspace. If Personal, only the creator can access applications on this compute instance.
+    When Shared, any workspace user can access applications on this instance depending on their
+    assigned role.
+ """
+
+ PERSONAL = "Personal"
+ SHARED = "Shared"
+
+class BillingCurrency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Three-letter code specifying the currency of the VM price. Example: USD
+ """
+
+ USD = "USD"
+
+class ClusterPurpose(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Intended usage of the cluster
+ """
+
+ FAST_PROD = "FastProd"
+ DENSE_PROD = "DenseProd"
+ DEV_TEST = "DevTest"
+
+class ComputeEnvironmentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The compute environment type for the service.
+ """
+
+ ACI = "ACI"
+ AKS = "AKS"
+
+class ComputeInstanceAuthorizationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The Compute Instance Authorization type. Available values are personal (default).
+ """
+
+ PERSONAL = "personal"
+
+class ComputeInstanceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Current state of a ComputeInstance.
+ """
+
+ CREATING = "Creating"
+ CREATE_FAILED = "CreateFailed"
+ DELETING = "Deleting"
+ RUNNING = "Running"
+ RESTARTING = "Restarting"
+ JOB_RUNNING = "JobRunning"
+ SETTING_UP = "SettingUp"
+ SETUP_FAILED = "SetupFailed"
+ STARTING = "Starting"
+ STOPPED = "Stopped"
+ STOPPING = "Stopping"
+ USER_SETTING_UP = "UserSettingUp"
+ USER_SETUP_FAILED = "UserSetupFailed"
+ UNKNOWN = "Unknown"
+ UNUSABLE = "Unusable"
+
+class ComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of compute
+ """
+
+ AKS = "AKS"
+ AML_COMPUTE = "AmlCompute"
+ COMPUTE_INSTANCE = "ComputeInstance"
+ DATA_FACTORY = "DataFactory"
+ VIRTUAL_MACHINE = "VirtualMachine"
+ HD_INSIGHT = "HDInsight"
+ DATABRICKS = "Databricks"
+ DATA_LAKE_ANALYTICS = "DataLakeAnalytics"
+ SYNAPSE_SPARK = "SynapseSpark"
+
+class DeploymentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The deployment type for the service.
+ """
+
+ GRPC_REALTIME_ENDPOINT = "GRPCRealtimeEndpoint"
+ HTTP_REALTIME_ENDPOINT = "HttpRealtimeEndpoint"
+ BATCH = "Batch"
+
+class EncryptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Indicates whether or not the encryption is enabled for the workspace.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class IdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of identity that creates/modifies resources
+ """
+
+ USER = "User"
+ APPLICATION = "Application"
+ MANAGED_IDENTITY = "ManagedIdentity"
+ KEY = "Key"
+
+class LoadBalancerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Load Balancer Type
+ """
+
+ PUBLIC_IP = "PublicIp"
+ INTERNAL_LOAD_BALANCER = "InternalLoadBalancer"
+
+class NodeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """State of the compute node. Values are idle, running, preparing, unusable, leaving and
+ preempted.
+ """
+
+ IDLE = "idle"
+ RUNNING = "running"
+ PREPARING = "preparing"
+ UNUSABLE = "unusable"
+ LEAVING = "leaving"
+ PREEMPTED = "preempted"
+
+class OperationName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Name of the last operation.
+ """
+
+ CREATE = "Create"
+ START = "Start"
+ STOP = "Stop"
+ RESTART = "Restart"
+ REIMAGE = "Reimage"
+ DELETE = "Delete"
+
+class OperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Operation status.
+ """
+
+ IN_PROGRESS = "InProgress"
+ SUCCEEDED = "Succeeded"
+ CREATE_FAILED = "CreateFailed"
+ START_FAILED = "StartFailed"
+ STOP_FAILED = "StopFailed"
+ RESTART_FAILED = "RestartFailed"
+ REIMAGE_FAILED = "ReimageFailed"
+ DELETE_FAILED = "DeleteFailed"
+
+class OrderString(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ CREATED_AT_DESC = "CreatedAtDesc"
+ CREATED_AT_ASC = "CreatedAtAsc"
+ UPDATED_AT_DESC = "UpdatedAtDesc"
+ UPDATED_AT_ASC = "UpdatedAtAsc"
+
+class OsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Compute OS Type
+ """
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current provisioning state.
+ """
+
+ SUCCEEDED = "Succeeded"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ FAILED = "Failed"
+
+class PrivateEndpointServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The private endpoint connection status.
+ """
+
+ PENDING = "Pending"
+ APPROVED = "Approved"
+ REJECTED = "Rejected"
+ DISCONNECTED = "Disconnected"
+ TIMEOUT = "Timeout"
+
+class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """The current deployment state of the workspace resource. The provisioningState indicates the
+    state of resource provisioning.
+ """
+
+ UNKNOWN = "Unknown"
+ UPDATING = "Updating"
+ CREATING = "Creating"
+ DELETING = "Deleting"
+ SUCCEEDED = "Succeeded"
+ FAILED = "Failed"
+ CANCELED = "Canceled"
+
+class QuotaUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """An enum describing the unit of quota measurement.
+ """
+
+ COUNT = "Count"
+
+class ReasonCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The reason for the restriction.
+ """
+
+ NOT_SPECIFIED = "NotSpecified"
+ NOT_AVAILABLE_FOR_REGION = "NotAvailableForRegion"
+ NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription"
+
+class RemoteLoginPortPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """State of the public SSH port. Possible values are: Disabled - Indicates that the public SSH
+    port is closed on all nodes of the cluster. Enabled - Indicates that the public SSH port is
+    open on all nodes of the cluster. NotSpecified - Indicates that the public SSH port is closed
+    on all nodes of the cluster if a VNet is defined, else it is open on all public nodes. It can
+    be the default only at cluster creation time; after creation it will be either enabled or
+    disabled.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+ NOT_SPECIFIED = "NotSpecified"
+
+class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The identity type.
+ """
+
+ SYSTEM_ASSIGNED = "SystemAssigned"
+ SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+ USER_ASSIGNED = "UserAssigned"
+ NONE = "None"
+
+class SshPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """State of the public SSH port. Possible values are: Disabled - Indicates that the public SSH
+    port is closed on this instance. Enabled - Indicates that the public SSH port is open and
+    accessible according to the VNet/subnet policy if applicable.
+ """
+
+ ENABLED = "Enabled"
+ DISABLED = "Disabled"
+
+class SslConfigurationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Enable or disable SSL for scoring.
+ """
+
+ DISABLED = "Disabled"
+ ENABLED = "Enabled"
+ AUTO = "Auto"
+
+class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Status of the workspace quota update.
+ """
+
+ UNDEFINED = "Undefined"
+ SUCCESS = "Success"
+ FAILURE = "Failure"
+ INVALID_QUOTA_BELOW_CLUSTER_MINIMUM = "InvalidQuotaBelowClusterMinimum"
+ INVALID_QUOTA_EXCEEDS_SUBSCRIPTION_LIMIT = "InvalidQuotaExceedsSubscriptionLimit"
+ INVALID_VM_FAMILY_NAME = "InvalidVMFamilyName"
+ OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku"
+ OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion"
+
+class UnderlyingResourceAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+ DELETE = "Delete"
+ DETACH = "Detach"
+
+class UnitOfMeasure(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The unit of time measurement for the specified VM price. Example: OneHour
+ """
+
+ ONE_HOUR = "OneHour"
+
+class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """An enum describing the unit of usage measurement.
+ """
+
+ COUNT = "Count"
+
+class ValueFormat(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Format for the workspace connection value.
+ """
+
+ JSON = "JSON"
+
+class VariantType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The type of the variant.
+ """
+
+ CONTROL = "Control"
+ TREATMENT = "Treatment"
+
+class VmPriceOsType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Operating system type used by the VM.
+ """
+
+ LINUX = "Linux"
+ WINDOWS = "Windows"
+
+class VmPriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """Virtual Machine priority
+ """
+
+ DEDICATED = "Dedicated"
+ LOW_PRIORITY = "LowPriority"
+
+class VmTier(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """The tier of the VM.
+ """
+
+ STANDARD = "Standard"
+ LOW_PRIORITY = "LowPriority"
+ SPOT = "Spot"
+
+class WebServiceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+ """The current state of the service.
+ """
+
+ TRANSITIONING = "Transitioning"
+ HEALTHY = "Healthy"
+ UNHEALTHY = "Unhealthy"
+ FAILED = "Failed"
+ UNSCHEDULABLE = "Unschedulable"
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
new file mode 100644
index 00000000000..7cddba7bd15
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
@@ -0,0 +1,7053 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class CreateServiceRequest(msrest.serialization.Model):
+ """The base class for creating a service.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AciServiceCreateRequest, CreateEndpointVariantRequest.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+    :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'ACI': 'AciServiceCreateRequest', 'Custom': 'CreateEndpointVariantRequest'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CreateServiceRequest, self).__init__(**kwargs)
+ self.description = kwargs.get('description', None)
+ self.kv_tags = kwargs.get('kv_tags', None)
+ self.properties = kwargs.get('properties', None)
+ self.keys = kwargs.get('keys', None)
+ self.compute_type = None # type: Optional[str]
+ self.environment_image_request = kwargs.get('environment_image_request', None)
+ self.location = kwargs.get('location', None)
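+
+# Note on the polymorphic pattern above (a rough sketch, not part of the generated
+# contract): msrest's Deserializer consults _subtype_map together with the
+# 'computeType' discriminator, so a payload whose computeType is "ACI"
+# deserializes as AciServiceCreateRequest (defined below) and "Custom" as
+# CreateEndpointVariantRequest; each subclass overwrites self.compute_type with
+# its own constant in __init__.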
+
+
+class AciServiceCreateRequest(CreateServiceRequest):
+ """AciServiceCreateRequest.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+    :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :param auth_enabled: Whether or not authentication is enabled on the service.
+ :type auth_enabled: bool
+ :param ssl_enabled: Whether or not SSL is enabled.
+ :type ssl_enabled: bool
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param ssl_certificate: The public SSL certificate in PEM format to use if SSL is enabled.
+ :type ssl_certificate: str
+ :param ssl_key: The public SSL key in PEM format for the certificate.
+ :type ssl_key: str
+ :param cname: The CName for the service.
+ :type cname: str
+ :param dns_name_label: The Dns label for the service.
+ :type dns_name_label: str
+ :param vnet_configuration: The virtual network configuration.
+ :type vnet_configuration: ~azure_machine_learning_workspaces.models.VnetConfiguration
+ :param encryption_properties: The encryption properties.
+ :type encryption_properties: ~azure_machine_learning_workspaces.models.EncryptionProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
+ 'ssl_key': {'key': 'sslKey', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'dns_name_label': {'key': 'dnsNameLabel', 'type': 'str'},
+ 'vnet_configuration': {'key': 'vnetConfiguration', 'type': 'VnetConfiguration'},
+ 'encryption_properties': {'key': 'encryptionProperties', 'type': 'EncryptionProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceCreateRequest, self).__init__(**kwargs)
+ self.compute_type = 'ACI' # type: str
+ self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
+ self.auth_enabled = kwargs.get('auth_enabled', False)
+ self.ssl_enabled = kwargs.get('ssl_enabled', False)
+ self.app_insights_enabled = kwargs.get('app_insights_enabled', False)
+ self.data_collection = kwargs.get('data_collection', None)
+ self.ssl_certificate = kwargs.get('ssl_certificate', None)
+ self.ssl_key = kwargs.get('ssl_key', None)
+ self.cname = kwargs.get('cname', None)
+ self.dns_name_label = kwargs.get('dns_name_label', None)
+ self.vnet_configuration = kwargs.get('vnet_configuration', None)
+ self.encryption_properties = kwargs.get('encryption_properties', None)
+
+
+class ModelDataCollection(msrest.serialization.Model):
+ """The Model data collection properties.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelDataCollection, self).__init__(**kwargs)
+ self.event_hub_enabled = kwargs.get('event_hub_enabled', None)
+ self.storage_enabled = kwargs.get('storage_enabled', None)
+
+
+class AciServiceCreateRequestDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestDataCollection, self).__init__(**kwargs)
+
+
+class EncryptionProperties(msrest.serialization.Model):
+ """EncryptionProperties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. vault base Url.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EncryptionProperties, self).__init__(**kwargs)
+ self.vault_base_url = kwargs['vault_base_url']
+ self.key_name = kwargs['key_name']
+ self.key_version = kwargs['key_version']
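+
+# Minimal construction sketch for EncryptionProperties (the vault URL, key name
+# and key version below are placeholder values, not real resources):
+#     EncryptionProperties(
+#         vault_base_url="https://myvault.vault.azure.net/",
+#         key_name="mykey",
+#         key_version="0123456789abcdef0123456789abcdef",
+#     )
+# Because __init__ indexes kwargs directly, omitting any of the three required
+# arguments raises a KeyError at construction time.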
+
+
+class AciServiceCreateRequestEncryptionProperties(EncryptionProperties):
+ """The encryption properties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. vault base Url.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestEncryptionProperties, self).__init__(**kwargs)
+
+
+class VnetConfiguration(msrest.serialization.Model):
+ """VnetConfiguration.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VnetConfiguration, self).__init__(**kwargs)
+ self.vnet_name = kwargs.get('vnet_name', None)
+ self.subnet_name = kwargs.get('subnet_name', None)
+
+
+class AciServiceCreateRequestVnetConfiguration(VnetConfiguration):
+ """The virtual network configuration.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestVnetConfiguration, self).__init__(**kwargs)
+
+
+class ServiceResponseBase(msrest.serialization.Model):
+    """The base service response. The correct inherited response based on computeType will be returned (e.g. ACIServiceResponse).
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AciServiceResponse, AksVariantResponse.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The service description.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :ivar state: The current state of the service. Possible values include: "Transitioning",
+ "Healthy", "Unhealthy", "Failed", "Unschedulable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+ :ivar error: The error details.
+ :vartype error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+    :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param deployment_type: The deployment type for the service. Possible values include:
+ "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+ :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+ """
+
+ _validation = {
+ 'state': {'readonly': True},
+ 'error': {'readonly': True},
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'ACI': 'AciServiceResponse', 'Custom': 'AksVariantResponse'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServiceResponseBase, self).__init__(**kwargs)
+ self.description = kwargs.get('description', None)
+ self.kv_tags = kwargs.get('kv_tags', None)
+ self.properties = kwargs.get('properties', None)
+ self.state = None
+ self.error = None
+ self.compute_type = None # type: Optional[str]
+ self.deployment_type = kwargs.get('deployment_type', None)
+
+
+class AciServiceResponse(ServiceResponseBase):
+ """The response for an ACI service.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The service description.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :ivar state: The current state of the service. Possible values include: "Transitioning",
+ "Healthy", "Unhealthy", "Failed", "Unschedulable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+ :ivar error: The error details.
+ :vartype error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+    :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param deployment_type: The deployment type for the service. Possible values include:
+ "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+ :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :ivar scoring_uri: The Uri for sending scoring requests.
+ :vartype scoring_uri: str
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param auth_enabled: Whether or not authentication is enabled on the service.
+ :type auth_enabled: bool
+ :param ssl_enabled: Whether or not SSL is enabled.
+ :type ssl_enabled: bool
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param ssl_certificate: The public SSL certificate in PEM format to use if SSL is enabled.
+ :type ssl_certificate: str
+ :param ssl_key: The public SSL key in PEM format for the certificate.
+ :type ssl_key: str
+ :param cname: The CName for the service.
+ :type cname: str
+ :param public_ip: The public IP address for the service.
+ :type public_ip: str
+ :param public_fqdn: The public Fqdn for the service.
+ :type public_fqdn: str
+ :ivar swagger_uri: The Uri for sending swagger requests.
+ :vartype swagger_uri: str
+ :ivar model_config_map: Details on the models and configurations.
+ :vartype model_config_map: dict[str, object]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment_image_request: The Environment, models and assets used for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageResponse
+ :param vnet_configuration: The virtual network configuration.
+ :type vnet_configuration: ~azure_machine_learning_workspaces.models.VnetConfiguration
+ :param encryption_properties: The encryption properties.
+ :type encryption_properties: ~azure_machine_learning_workspaces.models.EncryptionProperties
+ """
+
+ _validation = {
+ 'state': {'readonly': True},
+ 'error': {'readonly': True},
+ 'compute_type': {'required': True},
+ 'scoring_uri': {'readonly': True},
+ 'swagger_uri': {'readonly': True},
+ 'model_config_map': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
+ 'ssl_key': {'key': 'sslKey', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'public_ip': {'key': 'publicIp', 'type': 'str'},
+ 'public_fqdn': {'key': 'publicFqdn', 'type': 'str'},
+ 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
+ 'model_config_map': {'key': 'modelConfigMap', 'type': '{object}'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageResponse'},
+ 'vnet_configuration': {'key': 'vnetConfiguration', 'type': 'VnetConfiguration'},
+ 'encryption_properties': {'key': 'encryptionProperties', 'type': 'EncryptionProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceResponse, self).__init__(**kwargs)
+ self.compute_type = 'ACI' # type: str
+ self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
+ self.scoring_uri = None
+ self.location = kwargs.get('location', None)
+ self.auth_enabled = kwargs.get('auth_enabled', None)
+ self.ssl_enabled = kwargs.get('ssl_enabled', None)
+ self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
+ self.data_collection = kwargs.get('data_collection', None)
+ self.ssl_certificate = kwargs.get('ssl_certificate', None)
+ self.ssl_key = kwargs.get('ssl_key', None)
+ self.cname = kwargs.get('cname', None)
+ self.public_ip = kwargs.get('public_ip', None)
+ self.public_fqdn = kwargs.get('public_fqdn', None)
+ self.swagger_uri = None
+ self.model_config_map = None
+ self.models = kwargs.get('models', None)
+ self.environment_image_request = kwargs.get('environment_image_request', None)
+ self.vnet_configuration = kwargs.get('vnet_configuration', None)
+ self.encryption_properties = kwargs.get('encryption_properties', None)
+
+
+class AciServiceResponseDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceResponseDataCollection, self).__init__(**kwargs)
+
+
+class AciServiceResponseEncryptionProperties(EncryptionProperties):
+ """The encryption properties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. vault base Url.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceResponseEncryptionProperties, self).__init__(**kwargs)
+
+
+class EnvironmentImageResponse(msrest.serialization.Model):
+ """Request to create a Docker image based on Environment.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+    :param environment: The details of the Azure ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+    :param environment_reference: The unique identifying details of the Azure ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EnvironmentImageResponse, self).__init__(**kwargs)
+ self.driver_program = kwargs.get('driver_program', None)
+ self.assets = kwargs.get('assets', None)
+ self.model_ids = kwargs.get('model_ids', None)
+ self.models = kwargs.get('models', None)
+ self.environment = kwargs.get('environment', None)
+ self.environment_reference = kwargs.get('environment_reference', None)
+
+
+class AciServiceResponseEnvironmentImageRequest(EnvironmentImageResponse):
+ """The Environment, models and assets used for inferencing.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+    :param environment: The details of the Azure ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+    :param environment_reference: The unique identifying details of the Azure ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceResponseEnvironmentImageRequest, self).__init__(**kwargs)
+
+
+class AciServiceResponseVnetConfiguration(VnetConfiguration):
+ """The virtual network configuration.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AciServiceResponseVnetConfiguration, self).__init__(**kwargs)
+
+
+class Compute(msrest.serialization.Model):
+ """Machine Learning compute object.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, SynapseSpark, VirtualMachine.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+    :ivar provisioning_state: The provisioning state of the cluster. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+    brought from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+    :param disable_local_auth: Opt out of local authentication and ensure customers use MSI and
+    AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'SynapseSpark': 'SynapseSpark', 'VirtualMachine': 'VirtualMachine'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Compute, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.compute_location = kwargs.get('compute_location', None)
+ self.provisioning_state = None
+ self.description = kwargs.get('description', None)
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = kwargs.get('resource_id', None)
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+ self.disable_local_auth = kwargs.get('disable_local_auth', None)
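+
+# Note on the read-only members above: attributes flagged {'readonly': True} in
+# _validation (provisioning_state, created_on, modified_on, provisioning_errors,
+# is_attached_compute) are populated from service responses only; by default
+# msrest skips read-only attributes when serializing a request body, so values
+# assigned to them locally are not sent to the service.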
+
+
+class Aks(Compute):
+ """A Machine Learning compute based on AKS.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+    :ivar provisioning_state: The provisioning state of the cluster. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+    brought from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+    :param disable_local_auth: Opt out of local authentication and ensure customers use MSI and
+    AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: AKS properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AksProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Aks, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class ComputeSecrets(msrest.serialization.Model):
+ """Secrets related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+
+
+class AksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param user_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type user_kube_config: str
+ :param admin_kube_config: Content of kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type admin_kube_config: str
+ :param image_pull_secret_name: Image registry pull secret.
+ :type image_pull_secret_name: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.user_kube_config = kwargs.get('user_kube_config', None)
+ self.admin_kube_config = kwargs.get('admin_kube_config', None)
+ self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None)
+
+
+class AksNetworkingConfiguration(msrest.serialization.Model):
+    """Advanced configuration for AKS networking.
+
+ :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet_id: str
+ :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
+ not overlap with any Subnet IP ranges.
+ :type service_cidr: str
+ :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
+ the Kubernetes service address range specified in serviceCidr.
+ :type dns_service_ip: str
+ :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
+ must not overlap with any Subnet IP ranges or the Kubernetes service address range.
+ :type docker_bridge_cidr: str
+ """
+
+ _validation = {
+ 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
+ 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ }
+
+ _attribute_map = {
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
+ 'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
+ 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
+ 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksNetworkingConfiguration, self).__init__(**kwargs)
+ self.subnet_id = kwargs.get('subnet_id', None)
+ self.service_cidr = kwargs.get('service_cidr', None)
+ self.dns_service_ip = kwargs.get('dns_service_ip', None)
+ self.docker_bridge_cidr = kwargs.get('docker_bridge_cidr', None)
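+
+# Illustrative values for the pattern constraints above (the addresses are
+# placeholders): service_cidr="10.0.0.0/16", dns_service_ip="10.0.0.10" and
+# docker_bridge_cidr="172.17.0.1/16" all satisfy the regexes, whereas a value
+# such as service_cidr="not-a-cidr" is rejected by msrest when client-side
+# validation is enabled.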
+
+
+class AksProperties(msrest.serialization.Model):
+ """AKS properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param cluster_fqdn: Cluster fully qualified domain name.
+ :type cluster_fqdn: str
+ :ivar system_services: System services.
+ :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
+ :param agent_count: Number of agents.
+ :type agent_count: int
+ :param agent_vm_size: Agent virtual machine size.
+ :type agent_vm_size: str
+ :param cluster_purpose: Intended usage of the cluster. Possible values include: "FastProd",
+ "DenseProd", "DevTest". Default value: "FastProd".
+ :type cluster_purpose: str or ~azure_machine_learning_workspaces.models.ClusterPurpose
+ :param ssl_configuration: SSL configuration.
+ :type ssl_configuration: ~azure_machine_learning_workspaces.models.SslConfiguration
+ :param aks_networking_configuration: AKS networking configuration for vnet.
+ :type aks_networking_configuration:
+ ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration
+ :param load_balancer_type: Load Balancer Type. Possible values include: "PublicIp",
+ "InternalLoadBalancer". Default value: "PublicIp".
+ :type load_balancer_type: str or ~azure_machine_learning_workspaces.models.LoadBalancerType
+ :param load_balancer_subnet: Load Balancer Subnet.
+ :type load_balancer_subnet: str
+ """
+
+ _validation = {
+ 'system_services': {'readonly': True},
+ 'agent_count': {'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
+ 'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
+ 'agent_count': {'key': 'agentCount', 'type': 'int'},
+ 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'},
+ 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
+ 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'},
+ 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'},
+ 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'},
+ 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksProperties, self).__init__(**kwargs)
+ self.cluster_fqdn = kwargs.get('cluster_fqdn', None)
+ self.system_services = None
+ self.agent_count = kwargs.get('agent_count', None)
+ self.agent_vm_size = kwargs.get('agent_vm_size', None)
+ self.cluster_purpose = kwargs.get('cluster_purpose', "FastProd")
+ self.ssl_configuration = kwargs.get('ssl_configuration', None)
+ self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None)
+ self.load_balancer_type = kwargs.get('load_balancer_type', "PublicIp")
+ self.load_balancer_subnet = kwargs.get('load_balancer_subnet', None)
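+
+# Note: cluster_purpose and load_balancer_type fall back to the documented
+# defaults ("FastProd" and "PublicIp") when they are not supplied, because the
+# kwargs.get(...) calls above pass those defaults explicitly; every other
+# property simply defaults to None.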
+
+
+class AksReplicaStatus(msrest.serialization.Model):
+ """AksReplicaStatus.
+
+ :param desired_replicas: The desired number of replicas.
+ :type desired_replicas: int
+ :param updated_replicas: The number of updated replicas.
+ :type updated_replicas: int
+ :param available_replicas: The number of available replicas.
+ :type available_replicas: int
+ :param error: The error details.
+ :type error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+ """
+
+ _attribute_map = {
+ 'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
+ 'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
+ 'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksReplicaStatus, self).__init__(**kwargs)
+ self.desired_replicas = kwargs.get('desired_replicas', None)
+ self.updated_replicas = kwargs.get('updated_replicas', None)
+ self.available_replicas = kwargs.get('available_replicas', None)
+ self.error = kwargs.get('error', None)
+
+
+class MachineLearningServiceError(msrest.serialization.Model):
+ """Wrapper for error response to follow ARM guidelines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar error: The error response.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ """
+
+ _validation = {
+ 'error': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MachineLearningServiceError, self).__init__(**kwargs)
+ self.error = None
+
+
+class AksReplicaStatusError(MachineLearningServiceError):
+ """The error details.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar error: The error response.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ """
+
+ _validation = {
+ 'error': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksReplicaStatusError, self).__init__(**kwargs)
+
+
+class CreateEndpointVariantRequest(CreateServiceRequest):
+ """The Variant properties.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksServiceCreateRequest.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+ :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param is_default: Is this the default variant.
+ :type is_default: bool
+ :param traffic_percentile: The amount of traffic the variant receives.
+ :type traffic_percentile: float
+ :param type: The type of the variant. Possible values include: "Control", "Treatment".
+ :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksServiceCreateRequest'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CreateEndpointVariantRequest, self).__init__(**kwargs)
+ self.compute_type = 'Custom' # type: str
+ self.is_default = kwargs.get('is_default', None)
+ self.traffic_percentile = kwargs.get('traffic_percentile', None)
+ self.type = kwargs.get('type', None)
+
+
+class AksServiceCreateRequest(CreateEndpointVariantRequest):
+ """The request to create an AKS service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+ :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param is_default: Is this the default variant.
+ :type is_default: bool
+ :param traffic_percentile: The amount of traffic the variant receives.
+ :type traffic_percentile: float
+ :param type: The type of the variant. Possible values include: "Control", "Treatment".
+ :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+ :param num_replicas: The number of replicas on the cluster.
+ :type num_replicas: int
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param compute_name: The name of the compute resource.
+ :type compute_name: str
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param auto_scaler: The auto scaler properties.
+ :type auto_scaler: ~azure_machine_learning_workspaces.models.AutoScaler
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :param max_concurrent_requests_per_container: The maximum number of concurrent requests per
+ container.
+ :type max_concurrent_requests_per_container: int
+ :param max_queue_wait_ms: Maximum time a request will wait in the queue (in milliseconds).
+ After this time, the service will return 503 (Service Unavailable).
+ :type max_queue_wait_ms: int
+ :param namespace: Kubernetes namespace for the service.
+ :type namespace: str
+ :param scoring_timeout_ms: The scoring timeout in milliseconds.
+ :type scoring_timeout_ms: int
+ :param auth_enabled: Whether or not authentication is enabled.
+ :type auth_enabled: bool
+ :param liveness_probe_requirements: The liveness probe requirements.
+ :type liveness_probe_requirements:
+ ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+ :param aad_auth_enabled: Whether or not AAD authentication is enabled.
+ :type aad_auth_enabled: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'num_replicas': {'key': 'numReplicas', 'type': 'int'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'compute_name': {'key': 'computeName', 'type': 'str'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
+ 'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+ 'namespace': {'key': 'namespace', 'type': 'str'},
+ 'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+ 'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceCreateRequest, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.num_replicas = kwargs.get('num_replicas', None)
+ self.data_collection = kwargs.get('data_collection', None)
+ self.compute_name = kwargs.get('compute_name', None)
+ self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
+ self.auto_scaler = kwargs.get('auto_scaler', None)
+ self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
+ self.max_concurrent_requests_per_container = kwargs.get('max_concurrent_requests_per_container', None)
+ self.max_queue_wait_ms = kwargs.get('max_queue_wait_ms', None)
+ self.namespace = kwargs.get('namespace', None)
+ self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
+ self.auth_enabled = kwargs.get('auth_enabled', None)
+ self.liveness_probe_requirements = kwargs.get('liveness_probe_requirements', None)
+ self.aad_auth_enabled = kwargs.get('aad_auth_enabled', None)
+
+
+class AutoScaler(msrest.serialization.Model):
+ """The Auto Scaler properties.
+
+ :param autoscale_enabled: Option to enable/disable auto scaling.
+ :type autoscale_enabled: bool
+ :param min_replicas: The minimum number of replicas to scale down to.
+ :type min_replicas: int
+ :param max_replicas: The maximum number of replicas in the cluster.
+ :type max_replicas: int
+ :param target_utilization: The target utilization percentage to use for determining whether to
+ scale the cluster.
+ :type target_utilization: int
+ :param refresh_period_in_seconds: The amount of seconds to wait between auto scale updates.
+ :type refresh_period_in_seconds: int
+ """
+
+ _attribute_map = {
+ 'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+ 'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+ 'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+ 'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+ 'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AutoScaler, self).__init__(**kwargs)
+ self.autoscale_enabled = kwargs.get('autoscale_enabled', None)
+ self.min_replicas = kwargs.get('min_replicas', None)
+ self.max_replicas = kwargs.get('max_replicas', None)
+ self.target_utilization = kwargs.get('target_utilization', None)
+ self.refresh_period_in_seconds = kwargs.get('refresh_period_in_seconds', None)
+
+
+class AksServiceCreateRequestAutoScaler(AutoScaler):
+ """The auto scaler properties.
+
+ :param autoscale_enabled: Option to enable/disable auto scaling.
+ :type autoscale_enabled: bool
+ :param min_replicas: The minimum number of replicas to scale down to.
+ :type min_replicas: int
+ :param max_replicas: The maximum number of replicas in the cluster.
+ :type max_replicas: int
+ :param target_utilization: The target utilization percentage to use for determining whether to
+ scale the cluster.
+ :type target_utilization: int
+ :param refresh_period_in_seconds: The amount of seconds to wait between auto scale updates.
+ :type refresh_period_in_seconds: int
+ """
+
+ _attribute_map = {
+ 'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+ 'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+ 'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+ 'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+ 'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceCreateRequestAutoScaler, self).__init__(**kwargs)
+
+
+class AksServiceCreateRequestDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceCreateRequestDataCollection, self).__init__(**kwargs)
+
+
+class LivenessProbeRequirements(msrest.serialization.Model):
+ """The liveness probe requirements.
+
+ :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+ :type failure_threshold: int
+ :param success_threshold: The number of successful probes before returning a healthy status.
+ :type success_threshold: int
+ :param timeout_seconds: The probe timeout in seconds.
+ :type timeout_seconds: int
+ :param period_seconds: The length of time between probes in seconds.
+ :type period_seconds: int
+ :param initial_delay_seconds: The delay before the first probe in seconds.
+ :type initial_delay_seconds: int
+ """
+
+ _attribute_map = {
+ 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+ 'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+ 'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+ 'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+ 'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LivenessProbeRequirements, self).__init__(**kwargs)
+ self.failure_threshold = kwargs.get('failure_threshold', None)
+ self.success_threshold = kwargs.get('success_threshold', None)
+ self.timeout_seconds = kwargs.get('timeout_seconds', None)
+ self.period_seconds = kwargs.get('period_seconds', None)
+ self.initial_delay_seconds = kwargs.get('initial_delay_seconds', None)
+
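+ # Illustrative sketch (not generated code): a minimal AksServiceCreateRequest assembled
+ # from the models defined above. Names and values are hypothetical examples; the keyword
+ # arguments mirror the _attribute_map entries of each class.
+ #
+ #     request = AksServiceCreateRequest(
+ #         compute_name="my-aks-compute",
+ #         num_replicas=2,
+ #         auto_scaler=AutoScaler(autoscale_enabled=True, min_replicas=1, max_replicas=4),
+ #         liveness_probe_requirements=LivenessProbeRequirements(
+ #             failure_threshold=3, period_seconds=10, initial_delay_seconds=310,
+ #         ),
+ #         auth_enabled=True,
+ #     )
+ #     request.compute_type  # 'AKS', set in __init__ as the polymorphic discriminator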
+
+class AksServiceCreateRequestLivenessProbeRequirements(LivenessProbeRequirements):
+ """The liveness probe requirements.
+
+ :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+ :type failure_threshold: int
+ :param success_threshold: The number of successful probes before returning a healthy status.
+ :type success_threshold: int
+ :param timeout_seconds: The probe timeout in seconds.
+ :type timeout_seconds: int
+ :param period_seconds: The length of time between probes in seconds.
+ :type period_seconds: int
+ :param initial_delay_seconds: The delay before the first probe in seconds.
+ :type initial_delay_seconds: int
+ """
+
+ _attribute_map = {
+ 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+ 'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+ 'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+ 'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+ 'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceCreateRequestLivenessProbeRequirements, self).__init__(**kwargs)
+
+
+class AksVariantResponse(ServiceResponseBase):
+ """The response for an AKS variant.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksServiceResponse.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The service description.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :ivar state: The current state of the service. Possible values include: "Transitioning",
+ "Healthy", "Unhealthy", "Failed", "Unschedulable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+ :ivar error: The error details.
+ :vartype error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+ :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param deployment_type: The deployment type for the service. Possible values include:
+ "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+ :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+ :param is_default: Is this the default variant.
+ :type is_default: bool
+ :param traffic_percentile: The amount of traffic the variant receives.
+ :type traffic_percentile: float
+ :param type: The type of the variant. Possible values include: "Control", "Treatment".
+ :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+ """
+
+ _validation = {
+ 'state': {'readonly': True},
+ 'error': {'readonly': True},
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksServiceResponse'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksVariantResponse, self).__init__(**kwargs)
+ self.compute_type = 'Custom' # type: str
+ self.is_default = kwargs.get('is_default', None)
+ self.traffic_percentile = kwargs.get('traffic_percentile', None)
+ self.type = kwargs.get('type', None)
+
+
+class AksServiceResponse(AksVariantResponse):
+ """The response for an AKS service.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The service description.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :ivar state: The current state of the service. Possible values include: "Transitioning",
+ "Healthy", "Unhealthy", "Failed", "Unschedulable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+ :ivar error: The error details.
+ :vartype error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+ :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param deployment_type: The deployment type for the service. Possible values include:
+ "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+ :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+ :param is_default: Is this the default variant.
+ :type is_default: bool
+ :param traffic_percentile: The amount of traffic the variant receives.
+ :type traffic_percentile: float
+ :param type: The type of the variant. Possible values include: "Control", "Treatment".
+ :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :param max_concurrent_requests_per_container: The maximum number of concurrent requests per
+ container.
+ :type max_concurrent_requests_per_container: int
+ :param max_queue_wait_ms: Maximum time a request will wait in the queue (in milliseconds).
+ After this time, the service will return 503 (Service Unavailable).
+ :type max_queue_wait_ms: int
+ :param compute_name: The name of the compute resource.
+ :type compute_name: str
+ :param namespace: The Kubernetes namespace of the deployment.
+ :type namespace: str
+ :param num_replicas: The number of replicas on the cluster.
+ :type num_replicas: int
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param auto_scaler: The auto scaler properties.
+ :type auto_scaler: ~azure_machine_learning_workspaces.models.AutoScaler
+ :ivar scoring_uri: The URI for sending scoring requests.
+ :vartype scoring_uri: str
+ :ivar deployment_status: The deployment status.
+ :vartype deployment_status: ~azure_machine_learning_workspaces.models.AksReplicaStatus
+ :param scoring_timeout_ms: The scoring timeout in milliseconds.
+ :type scoring_timeout_ms: int
+ :param liveness_probe_requirements: The liveness probe requirements.
+ :type liveness_probe_requirements:
+ ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+ :param auth_enabled: Whether or not authentication is enabled.
+ :type auth_enabled: bool
+ :param aad_auth_enabled: Whether or not AAD authentication is enabled.
+ :type aad_auth_enabled: bool
+ :ivar swagger_uri: The URI for sending swagger requests.
+ :vartype swagger_uri: str
+ :ivar model_config_map: Details on the models and configurations.
+ :vartype model_config_map: dict[str, object]
+ :param environment_image_request: The Environment, models and assets used for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageResponse
+ """
+
+ _validation = {
+ 'state': {'readonly': True},
+ 'error': {'readonly': True},
+ 'compute_type': {'required': True},
+ 'scoring_uri': {'readonly': True},
+ 'deployment_status': {'readonly': True},
+ 'swagger_uri': {'readonly': True},
+ 'model_config_map': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
+ 'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+ 'compute_name': {'key': 'computeName', 'type': 'str'},
+ 'namespace': {'key': 'namespace', 'type': 'str'},
+ 'num_replicas': {'key': 'numReplicas', 'type': 'int'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
+ 'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
+ 'deployment_status': {'key': 'deploymentStatus', 'type': 'AksReplicaStatus'},
+ 'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+ 'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
+ 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
+ 'model_config_map': {'key': 'modelConfigMap', 'type': '{object}'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceResponse, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.models = kwargs.get('models', None)
+ self.container_resource_requirements = kwargs.get('container_resource_requirements', None)
+ self.max_concurrent_requests_per_container = kwargs.get('max_concurrent_requests_per_container', None)
+ self.max_queue_wait_ms = kwargs.get('max_queue_wait_ms', None)
+ self.compute_name = kwargs.get('compute_name', None)
+ self.namespace = kwargs.get('namespace', None)
+ self.num_replicas = kwargs.get('num_replicas', None)
+ self.data_collection = kwargs.get('data_collection', None)
+ self.app_insights_enabled = kwargs.get('app_insights_enabled', None)
+ self.auto_scaler = kwargs.get('auto_scaler', None)
+ self.scoring_uri = None
+ self.deployment_status = None
+ self.scoring_timeout_ms = kwargs.get('scoring_timeout_ms', None)
+ self.liveness_probe_requirements = kwargs.get('liveness_probe_requirements', None)
+ self.auth_enabled = kwargs.get('auth_enabled', None)
+ self.aad_auth_enabled = kwargs.get('aad_auth_enabled', None)
+ self.swagger_uri = None
+ self.model_config_map = None
+ self.environment_image_request = kwargs.get('environment_image_request', None)
+
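+ # Illustrative sketch (not generated code): msrest models can be rebuilt from a service
+ # payload with the base-class deserialize() helper. The dict below is a hypothetical,
+ # heavily trimmed response body using the wire (camelCase) keys from _attribute_map.
+ #
+ #     payload = {"computeType": "AKS", "numReplicas": 2, "authEnabled": True}
+ #     svc = AksServiceResponse.deserialize(payload)
+ #     svc.num_replicas   # 2
+ #     svc.scoring_uri    # None here; read-only and populated only by the server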
+
+class AksServiceResponseAutoScaler(AutoScaler):
+ """The auto scaler properties.
+
+ :param autoscale_enabled: Option to enable/disable auto scaling.
+ :type autoscale_enabled: bool
+ :param min_replicas: The minimum number of replicas to scale down to.
+ :type min_replicas: int
+ :param max_replicas: The maximum number of replicas in the cluster.
+ :type max_replicas: int
+ :param target_utilization: The target utilization percentage to use for determining whether to
+ scale the cluster.
+ :type target_utilization: int
+ :param refresh_period_in_seconds: The amount of seconds to wait between auto scale updates.
+ :type refresh_period_in_seconds: int
+ """
+
+ _attribute_map = {
+ 'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+ 'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+ 'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+ 'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+ 'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceResponseAutoScaler, self).__init__(**kwargs)
+
+
+class AksServiceResponseDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceResponseDataCollection, self).__init__(**kwargs)
+
+
+class AksServiceResponseDeploymentStatus(AksReplicaStatus):
+ """The deployment status.
+
+ :param desired_replicas: The desired number of replicas.
+ :type desired_replicas: int
+ :param updated_replicas: The number of updated replicas.
+ :type updated_replicas: int
+ :param available_replicas: The number of available replicas.
+ :type available_replicas: int
+ :param error: The error details.
+ :type error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+ """
+
+ _attribute_map = {
+ 'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
+ 'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
+ 'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceResponseDeploymentStatus, self).__init__(**kwargs)
+
+
+class AksServiceResponseEnvironmentImageRequest(EnvironmentImageResponse):
+ """The Environment, models and assets used for inferencing.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the Azure ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+ :param environment_reference: The unique identifying details of the Azure ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceResponseEnvironmentImageRequest, self).__init__(**kwargs)
+
+
+class AksServiceResponseLivenessProbeRequirements(LivenessProbeRequirements):
+ """The liveness probe requirements.
+
+ :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+ :type failure_threshold: int
+ :param success_threshold: The number of successful probes before returning a healthy status.
+ :type success_threshold: int
+ :param timeout_seconds: The probe timeout in seconds.
+ :type timeout_seconds: int
+ :param period_seconds: The length of time between probes in seconds.
+ :type period_seconds: int
+ :param initial_delay_seconds: The delay before the first probe in seconds.
+ :type initial_delay_seconds: int
+ """
+
+ _attribute_map = {
+ 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+ 'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+ 'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+ 'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+ 'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksServiceResponseLivenessProbeRequirements, self).__init__(**kwargs)
+
+
+class AmlCompute(Compute):
+ """An Azure Machine Learning compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provisioning state of the cluster. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: AML Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlCompute, self).__init__(**kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class AmlComputeNodeInformation(msrest.serialization.Model):
+ """Compute node information related to a AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar node_id: ID of the compute node.
+ :vartype node_id: str
+ :ivar private_ip_address: Private IP address of the compute node.
+ :vartype private_ip_address: str
+ :ivar public_ip_address: Public IP address of the compute node.
+ :vartype public_ip_address: str
+ :ivar port: SSH port number of the node.
+ :vartype port: int
+ :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable,
+ leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable",
+ "leaving", "preempted".
+ :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState
+ :ivar run_id: ID of the Experiment running on the node, if any; otherwise null.
+ :vartype run_id: str
+ """
+
+ _validation = {
+ 'node_id': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'port': {'readonly': True},
+ 'node_state': {'readonly': True},
+ 'run_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'node_id': {'key': 'nodeId', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'port': {'key': 'port', 'type': 'int'},
+ 'node_state': {'key': 'nodeState', 'type': 'str'},
+ 'run_id': {'key': 'runId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodeInformation, self).__init__(**kwargs)
+ self.node_id = None
+ self.private_ip_address = None
+ self.public_ip_address = None
+ self.port = None
+ self.node_state = None
+ self.run_id = None
+
+
+class ComputeNodesInformation(msrest.serialization.Model):
+ """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AmlComputeNodesInformation.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeNodesInformation, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.next_link = None
+
+
+class AmlComputeNodesInformation(ComputeNodesInformation):
+ """Compute node information related to a AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ :ivar nodes: The collection of returned AmlCompute nodes details.
+ :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation]
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'next_link': {'readonly': True},
+ 'nodes': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodesInformation, self).__init__(**kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.nodes = None
+
+
+class AmlComputeProperties(msrest.serialization.Model):
+ """AML Compute properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value:
+ "Linux".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.OsType
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated",
+ "LowPriority".
+ :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority
+ :param virtual_machine_image: Virtual Machine image for AML Compute - Windows only.
+ :type virtual_machine_image: ~azure_machine_learning_workspaces.models.VirtualMachineImage
+ :param isolated_network: Network is isolated or not.
+ :type isolated_network: bool
+ :param scale_settings: Scale settings for AML Compute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ :param user_account_credentials: Credentials for an administrator user account that will be
+ created on each compute node.
+ :type user_account_credentials:
+ ~azure_machine_learning_workspaces.models.UserAccountCredentials
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param remote_login_port_public_access: State of the public SSH port. Possible values are:
+ Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled -
+ Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified -
+ Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined,
+ else is open on all public nodes. It can be default only during cluster creation time; after
+ creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled",
+ "NotSpecified". Default value: "NotSpecified".
+ :type remote_login_port_public_access: str or
+ ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess
+ :ivar allocation_state: Allocation state of the compute. Possible values are: steady -
+ Indicates that the compute is not resizing. There are no changes to the number of compute nodes
+ in the compute in progress. A compute enters this state when it is created and when no
+ operations are being performed on the compute to change the number of compute nodes. resizing -
+ Indicates that the compute is resizing; that is, compute nodes are being added to or removed
+ from the compute. Possible values include: "Steady", "Resizing".
+ :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState
+ :ivar allocation_state_transition_time: The time at which the compute entered its current
+ allocation state.
+ :vartype allocation_state_transition_time: ~datetime.datetime
+ :ivar errors: Collection of errors encountered by various compute nodes during node setup.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar current_node_count: The number of compute nodes currently assigned to the compute.
+ :vartype current_node_count: int
+ :ivar target_node_count: The target number of compute nodes for the compute. If the
+ allocationState is resizing, this property denotes the target node count for the ongoing resize
+ operation. If the allocationState is steady, this property denotes the target node count for
+ the previous resize operation.
+ :vartype target_node_count: int
+ :ivar node_state_counts: Counts of various node states on the compute.
+ :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts
+ :param enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
+ values are: true - Indicates that the compute nodes will have public IPs
+ provisioned. false - Indicates that the compute nodes will have a private endpoint and no
+ public IPs.
+ :type enable_node_public_ip: bool
+ """
+
+ _validation = {
+ 'allocation_state': {'readonly': True},
+ 'allocation_state_transition_time': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'current_node_count': {'readonly': True},
+ 'target_node_count': {'readonly': True},
+ 'node_state_counts': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'vm_priority': {'key': 'vmPriority', 'type': 'str'},
+ 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'},
+ 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+ 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'},
+ 'allocation_state': {'key': 'allocationState', 'type': 'str'},
+ 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
+ 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+ 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
+ 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
+ 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'},
+ 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeProperties, self).__init__(**kwargs)
+ self.os_type = kwargs.get('os_type', "Linux")
+ self.vm_size = kwargs.get('vm_size', None)
+ self.vm_priority = kwargs.get('vm_priority', None)
+ self.virtual_machine_image = kwargs.get('virtual_machine_image', None)
+ self.isolated_network = kwargs.get('isolated_network', None)
+ self.scale_settings = kwargs.get('scale_settings', None)
+ self.user_account_credentials = kwargs.get('user_account_credentials', None)
+ self.subnet = kwargs.get('subnet', None)
+ self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified")
+ self.allocation_state = None
+ self.allocation_state_transition_time = None
+ self.errors = None
+ self.current_node_count = None
+ self.target_node_count = None
+ self.node_state_counts = None
+ self.enable_node_public_ip = kwargs.get('enable_node_public_ip', True)
+
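+ # Illustrative sketch (not generated code): AML compute properties as they might be built
+ # client-side before creating a cluster. ScaleSettings is defined elsewhere in this module;
+ # the VM size, priority, and node counts are hypothetical.
+ #
+ #     aml_props = AmlComputeProperties(
+ #         vm_size="STANDARD_DS3_V2",
+ #         vm_priority="Dedicated",
+ #         scale_settings=ScaleSettings(max_node_count=4, min_node_count=0),
+ #         remote_login_port_public_access="Disabled",
+ #     )
+ #     compute = AmlCompute(properties=aml_props)
+ #     compute.compute_type   # 'AmlCompute', the polymorphic discriminator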
+
+class AmlUserFeature(msrest.serialization.Model):
+ """Features enabled for a workspace.
+
+ :param id: Specifies the feature ID.
+ :type id: str
+ :param display_name: Specifies the feature name.
+ :type display_name: str
+ :param description: Describes the feature for user experience.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlUserFeature, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.display_name = kwargs.get('display_name', None)
+ self.description = kwargs.get('description', None)
+
+
+class AssignedUser(msrest.serialization.Model):
+ """A user that can be assigned to a compute instance.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param object_id: Required. User’s AAD Object Id.
+ :type object_id: str
+ :param tenant_id: Required. User’s AAD Tenant Id.
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'object_id': {'required': True},
+ 'tenant_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'object_id': {'key': 'objectId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AssignedUser, self).__init__(**kwargs)
+ self.object_id = kwargs['object_id']
+ self.tenant_id = kwargs['tenant_id']
+
+
+class AuthKeys(msrest.serialization.Model):
+ """AuthKeys.
+
+ :param primary_key: The primary key.
+ :type primary_key: str
+ :param secondary_key: The secondary key.
+ :type secondary_key: str
+ """
+
+ _attribute_map = {
+ 'primary_key': {'key': 'primaryKey', 'type': 'str'},
+ 'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AuthKeys, self).__init__(**kwargs)
+ self.primary_key = kwargs.get('primary_key', None)
+ self.secondary_key = kwargs.get('secondary_key', None)
+
+
+class AutoPauseProperties(msrest.serialization.Model):
+ """Auto pause properties.
+
+ :param delay_in_minutes: The idle time, in minutes, before the compute is automatically paused.
+ :type delay_in_minutes: int
+ :param enabled: Whether auto-pause is enabled.
+ :type enabled: bool
+ """
+
+ _attribute_map = {
+ 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AutoPauseProperties, self).__init__(**kwargs)
+ self.delay_in_minutes = kwargs.get('delay_in_minutes', None)
+ self.enabled = kwargs.get('enabled', None)
+
+
+class AutoScaleProperties(msrest.serialization.Model):
+ """Auto scale properties.
+
+ :param min_node_count: The minimum number of nodes to scale down to.
+ :type min_node_count: int
+ :param enabled: Whether auto-scale is enabled.
+ :type enabled: bool
+ :param max_node_count: The maximum number of nodes to scale up to.
+ :type max_node_count: int
+ """
+
+ _attribute_map = {
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AutoScaleProperties, self).__init__(**kwargs)
+ self.min_node_count = kwargs.get('min_node_count', None)
+ self.enabled = kwargs.get('enabled', None)
+ self.max_node_count = kwargs.get('max_node_count', None)
+
+
+class ClusterUpdateParameters(msrest.serialization.Model):
+ """AmlCompute update parameters.
+
+ :param scale_settings: Desired scale settings for the amlCompute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ """
+
+ _attribute_map = {
+ 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ClusterUpdateParameters, self).__init__(**kwargs)
+ self.scale_settings = kwargs.get('scale_settings', None)
+
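+ # Illustrative sketch (not generated code): the update payload used to resize an existing
+ # AmlCompute cluster. Note the flattened key ('properties.scaleSettings') in _attribute_map,
+ # so serialize() nests the scale settings under "properties". Values are hypothetical.
+ #
+ #     update = ClusterUpdateParameters(
+ #         scale_settings=ScaleSettings(max_node_count=8, min_node_count=0),
+ #     )
+ #     update.serialize()  # {"properties": {"scaleSettings": {"maxNodeCount": 8, ...}}}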
+
+class ComputeInstance(Compute):
+ """An Azure Machine Learning compute instance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provisioning state of the cluster. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties: Compute Instance properties.
+ :type properties: ~azure_machine_learning_workspaces.models.ComputeInstanceProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstance, self).__init__(**kwargs)
+ self.compute_type = 'ComputeInstance' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+ :param endpoint_uri: Application's endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = kwargs.get('display_name', None)
+ self.endpoint_uri = kwargs.get('endpoint_uri', None)
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+ """Defines all connectivity endpoints and properties for an ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+ which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+ """Describes information on user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_name: Name of the user.
+ :vartype user_name: str
+ :ivar user_org_id: Uniquely identifies the user's Azure Active Directory organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceLastOperation(msrest.serialization.Model):
+ """The last operation on ComputeInstance.
+
+ :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
+ "Stop", "Restart", "Reimage", "Delete".
+ :type operation_name: str or ~azure_machine_learning_workspaces.models.OperationName
+ :param operation_time: Time of the last operation.
+ :type operation_time: ~datetime.datetime
+ :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
+ "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
+ :type operation_status: str or ~azure_machine_learning_workspaces.models.OperationStatus
+ """
+
+ _attribute_map = {
+ 'operation_name': {'key': 'operationName', 'type': 'str'},
+ 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+ 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+ self.operation_name = kwargs.get('operation_name', None)
+ self.operation_time = kwargs.get('operation_time', None)
+ self.operation_status = kwargs.get('operation_status', None)
+
+
+class ComputeInstanceProperties(msrest.serialization.Model):
+ """Compute Instance properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param application_sharing_policy: Policy for sharing applications on this compute instance
+ among users of the parent workspace. If Personal, only the creator can access applications on this
+ compute instance. When Shared, any workspace user can access applications on this instance
+ depending on his/her assigned role. Possible values include: "Personal", "Shared". Default
+ value: "Shared".
+ :type application_sharing_policy: str or
+ ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy
+ :param ssh_settings: Specifies policy and settings for SSH access.
+ :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings
+ :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+ ComputeInstance.
+ :vartype connectivity_endpoints:
+ ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints
+ :ivar applications: Describes available applications and their endpoints on this
+ ComputeInstance.
+ :vartype applications:
+ list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication]
+ :ivar created_by: Describes information on user who created this ComputeInstance.
+ :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy
+ :ivar errors: Collection of errors encountered on this ComputeInstance.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar state: The current state of this ComputeInstance. Possible values include: "Creating",
+ "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+ "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState
+ :param compute_instance_authorization_type: The Compute Instance Authorization type. Available
+ values are personal (default). Possible values include: "personal". Default value: "personal".
+ :type compute_instance_authorization_type: str or
+ ~azure_machine_learning_workspaces.models.ComputeInstanceAuthorizationType
+ :param personal_compute_instance_settings: Settings for a personal compute instance.
+ :type personal_compute_instance_settings:
+ ~azure_machine_learning_workspaces.models.PersonalComputeInstanceSettings
+ :param setup_scripts: Details of customized scripts to execute for setting up the cluster.
+ :type setup_scripts: ~azure_machine_learning_workspaces.models.SetupScripts
+ :ivar last_operation: The last operation on ComputeInstance.
+ :vartype last_operation: ~azure_machine_learning_workspaces.models.ComputeInstanceLastOperation
+ """
+
+ _validation = {
+ 'connectivity_endpoints': {'readonly': True},
+ 'applications': {'readonly': True},
+ 'created_by': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'state': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+ 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+ 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+ 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+ 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+ 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+ 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+ 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'},
+ 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceProperties, self).__init__(**kwargs)
+ self.vm_size = kwargs.get('vm_size', None)
+ self.subnet = kwargs.get('subnet', None)
+ self.application_sharing_policy = kwargs.get('application_sharing_policy', "Shared")
+ self.ssh_settings = kwargs.get('ssh_settings', None)
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.compute_instance_authorization_type = kwargs.get('compute_instance_authorization_type', "personal")
+ self.personal_compute_instance_settings = kwargs.get('personal_compute_instance_settings', None)
+ self.setup_scripts = kwargs.get('setup_scripts', None)
+ self.last_operation = None
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable.
+ Possible values include: "Enabled", "Disabled". Default value: "Disabled".
+ :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess
+ :ivar admin_user_name: Describes the admin user name.
+ :vartype admin_user_name: str
+ :ivar ssh_port: Describes the port for connecting through SSH.
+ :vartype ssh_port: int
+ :param admin_public_key: Specifies the SSH RSA public key file as a string. Use "ssh-keygen -t
+ rsa -b 2048" to generate your SSH key pairs.
+ :type admin_public_key: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'readonly': True},
+ 'ssh_port': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'},
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceSshSettings, self).__init__(**kwargs)
+ self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled")
+ self.admin_user_name = None
+ self.ssh_port = None
+ self.admin_public_key = kwargs.get('admin_public_key', None)
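+
+# Illustrative usage sketch (not part of the generated models): enabling SSH access on a
+# compute instance. Keyword names follow the constructors' **kwargs above; the VM size and
+# public key values are placeholders.
+#
+#     ssh = ComputeInstanceSshSettings(
+#         ssh_public_access="Enabled",
+#         admin_public_key="ssh-rsa AAAAB3... user@example",
+#     )
+#     props = ComputeInstanceProperties(vm_size="STANDARD_DS3_V2", ssh_settings=ssh)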
+
+
+class Resource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Resource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.identity = kwargs.get('identity', None)
+ self.location = kwargs.get('location', None)
+ self.type = None
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.system_data = None
+
+
+class ComputeResource(Resource):
+ """Machine Learning compute object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeResource, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
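+
+# Illustrative sketch (an assumption, not generated code): a compute definition is wrapped
+# into this ARM envelope before being sent, using one of the Compute subclasses defined
+# later in this module (the description keyword is assumed to be handled by the Compute
+# base class):
+#
+#     resource = ComputeResource(
+#         location="eastus2euap",
+#         properties=DataFactory(description="attached data factory"),
+#     )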
+
+
+class ContainerRegistry(msrest.serialization.Model):
+ """ContainerRegistry.
+
+ :param address:
+ :type address: str
+ :param username:
+ :type username: str
+ :param password:
+ :type password: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ContainerRegistry, self).__init__(**kwargs)
+ self.address = kwargs.get('address', None)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+
+
+class ContainerRegistryResponse(msrest.serialization.Model):
+ """ContainerRegistryResponse.
+
+ :param address:
+ :type address: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ContainerRegistryResponse, self).__init__(**kwargs)
+ self.address = kwargs.get('address', None)
+
+
+class ContainerResourceRequirements(msrest.serialization.Model):
+ """The resource requirements for the container (cpu and memory).
+
+ :param cpu: The minimum amount of CPU cores to be used by the container. More info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type cpu: float
+ :param cpu_limit: The maximum amount of CPU cores allowed to be used by the container. More
+ info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type cpu_limit: float
+ :param memory_in_gb: The minimum amount of memory (in GB) to be used by the container. More
+ info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type memory_in_gb: float
+ :param memory_in_gb_limit: The maximum amount of memory (in GB) allowed to be used by the
+ container. More info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type memory_in_gb_limit: float
+ :param gpu: The number of GPU cores in the container.
+ :type gpu: int
+ :param fpga: The number of FPGA PCIE devices exposed to the container. Must be multiple of 2.
+ :type fpga: int
+ """
+
+ _attribute_map = {
+ 'cpu': {'key': 'cpu', 'type': 'float'},
+ 'cpu_limit': {'key': 'cpuLimit', 'type': 'float'},
+ 'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
+ 'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'},
+ 'gpu': {'key': 'gpu', 'type': 'int'},
+ 'fpga': {'key': 'fpga', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ContainerResourceRequirements, self).__init__(**kwargs)
+ self.cpu = kwargs.get('cpu', None)
+ self.cpu_limit = kwargs.get('cpu_limit', None)
+ self.memory_in_gb = kwargs.get('memory_in_gb', None)
+ self.memory_in_gb_limit = kwargs.get('memory_in_gb_limit', None)
+ self.gpu = kwargs.get('gpu', None)
+ self.fpga = kwargs.get('fpga', None)
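+
+# Illustrative sketch: Kubernetes-style requests and limits for a scoring container.
+# The values below are placeholders, not recommendations.
+#
+#     requirements = ContainerResourceRequirements(
+#         cpu=0.5,
+#         cpu_limit=1.0,
+#         memory_in_gb=1.0,
+#         memory_in_gb_limit=2.0,
+#     )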
+
+
+class CosmosDbSettings(msrest.serialization.Model):
+ """CosmosDbSettings.
+
+ :param collections_throughput: The throughput of the collections in cosmosdb database.
+ :type collections_throughput: int
+ """
+
+ _attribute_map = {
+ 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CosmosDbSettings, self).__init__(**kwargs)
+ self.collections_throughput = kwargs.get('collections_throughput', None)
+
+
+class EnvironmentImageRequest(msrest.serialization.Model):
+ """Request to create a Docker image based on Environment.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the AZURE ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinition
+ :param environment_reference: The unique identifying details of the AZURE ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinition'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EnvironmentImageRequest, self).__init__(**kwargs)
+ self.driver_program = kwargs.get('driver_program', None)
+ self.assets = kwargs.get('assets', None)
+ self.model_ids = kwargs.get('model_ids', None)
+ self.models = kwargs.get('models', None)
+ self.environment = kwargs.get('environment', None)
+ self.environment_reference = kwargs.get('environment_reference', None)
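+
+# Illustrative sketch: requesting an image for a registered model by referencing an
+# environment by name and version. EnvironmentReference is defined later in this module;
+# the driver file name, model id, and environment name are placeholders.
+#
+#     request = EnvironmentImageRequest(
+#         driver_program="score.py",
+#         model_ids=["my-model:1"],
+#         environment_reference=EnvironmentReference(name="my-environment", version="1"),
+#     )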
+
+
+class CreateServiceRequestEnvironmentImageRequest(EnvironmentImageRequest):
+ """The Environment, models and assets needed for inferencing.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the AZURE ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinition
+ :param environment_reference: The unique identifying details of the AZURE ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinition'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CreateServiceRequestEnvironmentImageRequest, self).__init__(**kwargs)
+
+
+class CreateServiceRequestKeys(AuthKeys):
+ """The authentication keys.
+
+ :param primary_key: The primary key.
+ :type primary_key: str
+ :param secondary_key: The secondary key.
+ :type secondary_key: str
+ """
+
+ _attribute_map = {
+ 'primary_key': {'key': 'primaryKey', 'type': 'str'},
+ 'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(CreateServiceRequestKeys, self).__init__(**kwargs)
+
+
+class Databricks(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DatabricksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DatabricksProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Databricks, self).__init__(**kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class DatabricksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on Databricks.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param databricks_access_token: Access token for the Databricks account.
+ :type databricks_access_token: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+
+
+class DatabricksProperties(msrest.serialization.Model):
+ """DatabricksProperties.
+
+ :param databricks_access_token: Databricks access token.
+ :type databricks_access_token: str
+ :param workspace_url: Workspace Url.
+ :type workspace_url: str
+ """
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatabricksProperties, self).__init__(**kwargs)
+ self.databricks_access_token = kwargs.get('databricks_access_token', None)
+ self.workspace_url = kwargs.get('workspace_url', None)
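+
+# Illustrative sketch: attaching an existing Databricks workspace as a compute target.
+# The token and URL are placeholders; resource_id is assumed to be handled by the
+# Compute base class, as documented in the Databricks docstring above.
+#
+#     databricks = Databricks(
+#         resource_id="/subscriptions/.../providers/Microsoft.Databricks/workspaces/myworkspace",
+#         properties=DatabricksProperties(
+#             databricks_access_token="<personal-access-token>",
+#             workspace_url="https://adb-1234567890123456.7.azuredatabricks.net",
+#         ),
+#     )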
+
+
+class DataFactory(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataFactory, self).__init__(**kwargs)
+ self.compute_type = 'DataFactory' # type: str
+
+
+class DataLakeAnalytics(Compute):
+ """A DataLakeAnalytics compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DataLakeAnalyticsProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataLakeAnalytics, self).__init__(**kwargs)
+ self.compute_type = 'DataLakeAnalytics' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class DataLakeAnalyticsProperties(msrest.serialization.Model):
+ """DataLakeAnalyticsProperties.
+
+ :param data_lake_store_account_name: DataLake Store Account Name.
+ :type data_lake_store_account_name: str
+ """
+
+ _attribute_map = {
+ 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DataLakeAnalyticsProperties, self).__init__(**kwargs)
+ self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None)
+
+
+class DatasetReference(msrest.serialization.Model):
+ """The dataset reference object.
+
+ :param name: The name of the dataset reference.
+ :type name: str
+ :param id: The id of the dataset reference.
+ :type id: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(DatasetReference, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.id = kwargs.get('id', None)
+
+
+class EncryptionProperty(msrest.serialization.Model):
+ """EncryptionProperty.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param status: Required. Indicates whether or not the encryption is enabled for the workspace.
+ Possible values include: "Enabled", "Disabled".
+ :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
+ :param identity: The identity that will be used to access the key vault for encryption at rest.
+ :type identity: ~azure_machine_learning_workspaces.models.IdentityForCmk
+ :param key_vault_properties: Required. Customer Key vault properties.
+ :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties
+ """
+
+ _validation = {
+ 'status': {'required': True},
+ 'key_vault_properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'IdentityForCmk'},
+ 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EncryptionProperty, self).__init__(**kwargs)
+ self.status = kwargs['status']
+ self.identity = kwargs.get('identity', None)
+ self.key_vault_properties = kwargs['key_vault_properties']
+
+
+class ModelEnvironmentDefinition(msrest.serialization.Model):
+ """ModelEnvironmentDefinition.
+
+ :param name: The name of the environment.
+ :type name: str
+ :param version: The environment version.
+ :type version: str
+ :param python: Settings for a Python environment.
+ :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+ :param environment_variables: Definition of environment variables to be defined in the
+ environment.
+ :type environment_variables: dict[str, str]
+ :param docker: The definition of a Docker container.
+ :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSection
+ :param spark: The configuration for a Spark environment.
+ :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+ :param r: Settings for an R environment.
+ :type r: ~azure_machine_learning_workspaces.models.RSection
+ :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+ adding an inferencing stack, do not set this value. Valid values: "latest".
+ :type inferencing_stack_version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ 'python': {'key': 'python', 'type': 'ModelPythonSection'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'docker': {'key': 'docker', 'type': 'ModelDockerSection'},
+ 'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+ 'r': {'key': 'r', 'type': 'RSection'},
+ 'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinition, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.version = kwargs.get('version', None)
+ self.python = kwargs.get('python', None)
+ self.environment_variables = kwargs.get('environment_variables', None)
+ self.docker = kwargs.get('docker', None)
+ self.spark = kwargs.get('spark', None)
+ self.r = kwargs.get('r', None)
+ self.inferencing_stack_version = kwargs.get('inferencing_stack_version', None)
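+
+# Illustrative sketch: a minimal environment definition using only the scalar fields
+# documented above; python/docker/spark sections can be supplied the same way via the
+# ModelPythonSection, ModelDockerSection, and ModelSparkSection models referenced in the
+# _attribute_map. Names and values are placeholders.
+#
+#     env = ModelEnvironmentDefinition(
+#         name="my-inference-env",
+#         version="1",
+#         environment_variables={"EXAMPLE_FLAG": "1"},
+#         inferencing_stack_version="latest",
+#     )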
+
+
+class EnvironmentImageRequestEnvironment(ModelEnvironmentDefinition):
+ """The details of the AZURE ML environment.
+
+ :param name: The name of the environment.
+ :type name: str
+ :param version: The environment version.
+ :type version: str
+ :param python: Settings for a Python environment.
+ :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+ :param environment_variables: Definition of environment variables to be defined in the
+ environment.
+ :type environment_variables: dict[str, str]
+ :param docker: The definition of a Docker container.
+ :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSection
+ :param spark: The configuration for a Spark environment.
+ :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+ :param r: Settings for an R environment.
+ :type r: ~azure_machine_learning_workspaces.models.RSection
+ :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+ adding an inferencing stack, do not set this value. Valid values: "latest".
+ :type inferencing_stack_version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ 'python': {'key': 'python', 'type': 'ModelPythonSection'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'docker': {'key': 'docker', 'type': 'ModelDockerSection'},
+ 'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+ 'r': {'key': 'r', 'type': 'RSection'},
+ 'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EnvironmentImageRequestEnvironment, self).__init__(**kwargs)
+
+
+class EnvironmentReference(msrest.serialization.Model):
+ """EnvironmentReference.
+
+ :param name: Name of the environment.
+ :type name: str
+ :param version: Version of the environment.
+ :type version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EnvironmentReference, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.version = kwargs.get('version', None)
+
+
+class EnvironmentImageRequestEnvironmentReference(EnvironmentReference):
+ """The unique identifying details of the AZURE ML environment.
+
+ :param name: Name of the environment.
+ :type name: str
+ :param version: Version of the environment.
+ :type version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EnvironmentImageRequestEnvironmentReference, self).__init__(**kwargs)
+
+
+class ModelEnvironmentDefinitionResponse(msrest.serialization.Model):
+ """ModelEnvironmentDefinitionResponse.
+
+ :param name: The name of the environment.
+ :type name: str
+ :param version: The environment version.
+ :type version: str
+ :param python: Settings for a Python environment.
+ :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+ :param environment_variables: Definition of environment variables to be defined in the
+ environment.
+ :type environment_variables: dict[str, str]
+ :param docker: The definition of a Docker container.
+ :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSectionResponse
+ :param spark: The configuration for a Spark environment.
+ :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+ :param r: Settings for an R environment.
+ :type r: ~azure_machine_learning_workspaces.models.RSectionResponse
+ :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+ adding an inferencing stack, do not set this value. Valid values: "latest".
+ :type inferencing_stack_version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ 'python': {'key': 'python', 'type': 'ModelPythonSection'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'docker': {'key': 'docker', 'type': 'ModelDockerSectionResponse'},
+ 'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+ 'r': {'key': 'r', 'type': 'RSectionResponse'},
+ 'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponse, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.version = kwargs.get('version', None)
+ self.python = kwargs.get('python', None)
+ self.environment_variables = kwargs.get('environment_variables', None)
+ self.docker = kwargs.get('docker', None)
+ self.spark = kwargs.get('spark', None)
+ self.r = kwargs.get('r', None)
+ self.inferencing_stack_version = kwargs.get('inferencing_stack_version', None)
+
+
+class EnvironmentImageResponseEnvironment(ModelEnvironmentDefinitionResponse):
+ """The details of the AZURE ML environment.
+
+ :param name: The name of the environment.
+ :type name: str
+ :param version: The environment version.
+ :type version: str
+ :param python: Settings for a Python environment.
+ :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+ :param environment_variables: Definition of environment variables to be defined in the
+ environment.
+ :type environment_variables: dict[str, str]
+ :param docker: The definition of a Docker container.
+ :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSectionResponse
+ :param spark: The configuration for a Spark environment.
+ :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+ :param r: Settings for an R environment.
+ :type r: ~azure_machine_learning_workspaces.models.RSectionResponse
+ :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+ adding an inferencing stack, do not set this value. Valid values: "latest".
+ :type inferencing_stack_version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ 'python': {'key': 'python', 'type': 'ModelPythonSection'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'docker': {'key': 'docker', 'type': 'ModelDockerSectionResponse'},
+ 'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+ 'r': {'key': 'r', 'type': 'RSectionResponse'},
+ 'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EnvironmentImageResponseEnvironment, self).__init__(**kwargs)
+
+
+class EnvironmentImageResponseEnvironmentReference(EnvironmentReference):
+ """The unique identifying details of the AZURE ML environment.
+
+ :param name: Name of the environment.
+ :type name: str
+ :param version: Version of the environment.
+ :type version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EnvironmentImageResponseEnvironmentReference, self).__init__(**kwargs)
+
+
+class ErrorDetail(msrest.serialization.Model):
+ """Error detail information.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param code: Required. Error code.
+ :type code: str
+ :param message: Required. Error message.
+ :type message: str
+ """
+
+ _validation = {
+ 'code': {'required': True},
+ 'message': {'required': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorDetail, self).__init__(**kwargs)
+ self.code = kwargs['code']
+ self.message = kwargs['message']
+
+
+class ErrorResponse(msrest.serialization.Model):
+ """Error response information.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: Error code.
+ :vartype code: str
+ :ivar message: Error message.
+ :vartype message: str
+ :ivar target: The target of the particular error.
+ :vartype target: str
+ :ivar details: An array of error detail objects.
+ :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ 'target': {'readonly': True},
+ 'details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'target': {'key': 'target', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[ErrorDetail]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorResponse, self).__init__(**kwargs)
+ self.code = None
+ self.message = None
+ self.target = None
+ self.details = None
+
+
+class EstimatedVmPrice(msrest.serialization.Model):
+ """The estimated price info for using a VM of a particular OS type, tier, etc.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param retail_price: Required. The price charged for using the VM.
+ :type retail_price: float
+ :param os_type: Required. Operating system type used by the VM. Possible values include:
+ "Linux", "Windows".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.VmPriceOsType
+ :param vm_tier: Required. The type of the VM. Possible values include: "Standard",
+ "LowPriority", "Spot".
+ :type vm_tier: str or ~azure_machine_learning_workspaces.models.VmTier
+ """
+
+ _validation = {
+ 'retail_price': {'required': True},
+ 'os_type': {'required': True},
+ 'vm_tier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'retail_price': {'key': 'retailPrice', 'type': 'float'},
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_tier': {'key': 'vmTier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EstimatedVmPrice, self).__init__(**kwargs)
+ self.retail_price = kwargs['retail_price']
+ self.os_type = kwargs['os_type']
+ self.vm_tier = kwargs['vm_tier']
+
+
+class EstimatedVmPrices(msrest.serialization.Model):
+ """The estimated price info for using a VM.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param billing_currency: Required. Three-letter code specifying the currency of the VM price.
+ Example: USD. Possible values include: "USD".
+ :type billing_currency: str or ~azure_machine_learning_workspaces.models.BillingCurrency
+ :param unit_of_measure: Required. The unit of time measurement for the specified VM price.
+ Example: OneHour. Possible values include: "OneHour".
+ :type unit_of_measure: str or ~azure_machine_learning_workspaces.models.UnitOfMeasure
+ :param values: Required. The list of estimated prices for using a VM of a particular OS type,
+ tier, etc.
+ :type values: list[~azure_machine_learning_workspaces.models.EstimatedVmPrice]
+ """
+
+ _validation = {
+ 'billing_currency': {'required': True},
+ 'unit_of_measure': {'required': True},
+ 'values': {'required': True},
+ }
+
+ _attribute_map = {
+ 'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
+ 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[EstimatedVmPrice]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(EstimatedVmPrices, self).__init__(**kwargs)
+ self.billing_currency = kwargs['billing_currency']
+ self.unit_of_measure = kwargs['unit_of_measure']
+ self.values = kwargs['values']
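+
+# Note: the required parameters on this model (and on EstimatedVmPrice above) are read with
+# kwargs['...'], so omitting them raises KeyError at construction time. Illustrative sketch
+# with placeholder pricing values:
+#
+#     prices = EstimatedVmPrices(
+#         billing_currency="USD",
+#         unit_of_measure="OneHour",
+#         values=[EstimatedVmPrice(retail_price=0.096, os_type="Linux", vm_tier="Standard")],
+#     )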
+
+
+class HdInsight(Compute):
+ """A HDInsight compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.HdInsightProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'HdInsightProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HdInsight, self).__init__(**kwargs)
+ self.compute_type = 'HDInsight' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class HdInsightProperties(msrest.serialization.Model):
+ """HdInsightProperties.
+
+ :param ssh_port: Port open for ssh connections on the master node of the cluster.
+ :type ssh_port: int
+ :param address: Public IP address of the master node of the cluster.
+ :type address: str
+ :param administrator_account: Admin credentials for master node of the cluster.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _attribute_map = {
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(HdInsightProperties, self).__init__(**kwargs)
+ self.ssh_port = kwargs.get('ssh_port', None)
+ self.address = kwargs.get('address', None)
+ self.administrator_account = kwargs.get('administrator_account', None)
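+
+# Illustrative sketch: attach settings for an existing HDInsight cluster. The
+# VirtualMachineSshCredentials model is the one referenced in the _attribute_map above;
+# the username/password keywords assumed for it here, and resource_id being handled by
+# the Compute base class, are assumptions. Values are placeholders.
+#
+#     hdi_props = HdInsightProperties(
+#         ssh_port=22,
+#         address="10.0.0.4",
+#         administrator_account=VirtualMachineSshCredentials(username="sshuser", password="<secret>"),
+#     )
+#     hdi = HdInsight(resource_id="/subscriptions/.../clusters/mycluster", properties=hdi_props)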
+
+
+class Identity(msrest.serialization.Model):
+ """Identity for the resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of resource identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of resource.
+ :vartype tenant_id: str
+ :param type: The identity type. Possible values include: "SystemAssigned",
+ "SystemAssigned,UserAssigned", "UserAssigned", "None".
+ :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
+ :param user_assigned_identities: The user assigned identities associated with the resource.
+ :type user_assigned_identities: dict[str,
+ ~azure_machine_learning_workspaces.models.UserAssignedIdentity]
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Identity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.type = kwargs.get('type', None)
+ self.user_assigned_identities = kwargs.get('user_assigned_identities', None)
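+
+# Illustrative sketch: a system-assigned managed identity, or a combination with a
+# user-assigned identity keyed by its ARM resource id (UserAssignedIdentity is the model
+# referenced in the _attribute_map above). The resource id is a placeholder.
+#
+#     identity = Identity(type="SystemAssigned")
+#     # or
+#     identity = Identity(
+#         type="SystemAssigned,UserAssigned",
+#         user_assigned_identities={"/subscriptions/.../userAssignedIdentities/myuai": UserAssignedIdentity()},
+#     )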
+
+
+class IdentityForCmk(msrest.serialization.Model):
+ """Identity that will be used to access key vault for encryption at rest.
+
+ :param user_assigned_identity: The ArmId of the user assigned identity that will be used to
+ access the customer managed key vault.
+ :type user_assigned_identity: str
+ """
+
+ _attribute_map = {
+ 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(IdentityForCmk, self).__init__(**kwargs)
+ self.user_assigned_identity = kwargs.get('user_assigned_identity', None)
+
+
+class ImageAsset(msrest.serialization.Model):
+ """An Image asset.
+
+ :param id: The Asset Id.
+ :type id: str
+ :param mime_type: The mime type.
+ :type mime_type: str
+ :param url: The Url of the Asset.
+ :type url: str
+ :param unpack: Whether the Asset is unpacked.
+ :type unpack: bool
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'mime_type': {'key': 'mimeType', 'type': 'str'},
+ 'url': {'key': 'url', 'type': 'str'},
+ 'unpack': {'key': 'unpack', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ImageAsset, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.mime_type = kwargs.get('mime_type', None)
+ self.url = kwargs.get('url', None)
+ self.unpack = kwargs.get('unpack', None)
+
+
+class KeyVaultProperties(msrest.serialization.Model):
+ """KeyVaultProperties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned
+ encryption key is present.
+ :type key_vault_arm_id: str
+ :param key_identifier: Required. Key vault uri to access the encryption key.
+ :type key_identifier: str
+ :param identity_client_id: For future use - The client id of the identity which will be used to
+ access key vault.
+ :type identity_client_id: str
+ """
+
+ _validation = {
+ 'key_vault_arm_id': {'required': True},
+ 'key_identifier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
+ 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
+ 'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(KeyVaultProperties, self).__init__(**kwargs)
+ self.key_vault_arm_id = kwargs['key_vault_arm_id']
+ self.key_identifier = kwargs['key_identifier']
+ self.identity_client_id = kwargs.get('identity_client_id', None)
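+
+# Illustrative sketch: customer-managed key encryption, combining this model with the
+# EncryptionProperty defined earlier in this file. Both of EncryptionProperty's required
+# parameters must be supplied or a KeyError is raised. Vault and key values are placeholders.
+#
+#     encryption = EncryptionProperty(
+#         status="Enabled",
+#         key_vault_properties=KeyVaultProperties(
+#             key_vault_arm_id="/subscriptions/.../providers/Microsoft.KeyVault/vaults/mykv",
+#             key_identifier="https://mykv.vault.azure.net/keys/mykey/<version>",
+#         ),
+#     )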
+
+
+class ListAmlUserFeatureResult(msrest.serialization.Model):
+ """The List Aml user feature operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML user facing features.
+ :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature]
+ :ivar next_link: The URI to fetch the next page of AML user features information. Call
+ ListNext() with this to fetch the next page of AML user features information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[AmlUserFeature]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListAmlUserFeatureResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListNotebookKeysResult(msrest.serialization.Model):
+ """ListNotebookKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar primary_access_key:
+ :vartype primary_access_key: str
+ :ivar secondary_access_key:
+ :vartype secondary_access_key: str
+ """
+
+ _validation = {
+ 'primary_access_key': {'readonly': True},
+ 'secondary_access_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+ 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListNotebookKeysResult, self).__init__(**kwargs)
+ self.primary_access_key = None
+ self.secondary_access_key = None
+
+
+class ListStorageAccountKeysResult(msrest.serialization.Model):
+ """ListStorageAccountKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListStorageAccountKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+
+
+class ListUsagesResult(msrest.serialization.Model):
+ """The List Usages operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML resource usages.
+ :vartype value: list[~azure_machine_learning_workspaces.models.Usage]
+ :ivar next_link: The URI to fetch the next page of AML resource usage information. Call
+ ListNext() with this to fetch the next page of AML resource usage information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Usage]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListUsagesResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListWorkspaceKeysResult(msrest.serialization.Model):
+ """ListWorkspaceKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ :ivar user_storage_resource_id:
+ :vartype user_storage_resource_id: str
+ :ivar app_insights_instrumentation_key:
+ :vartype app_insights_instrumentation_key: str
+ :ivar container_registry_credentials:
+ :vartype container_registry_credentials:
+ ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult
+ :ivar notebook_access_keys:
+ :vartype notebook_access_keys: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ 'user_storage_resource_id': {'readonly': True},
+ 'app_insights_instrumentation_key': {'readonly': True},
+ 'container_registry_credentials': {'readonly': True},
+ 'notebook_access_keys': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'},
+ 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'},
+ 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'},
+ 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+ self.user_storage_resource_id = None
+ self.app_insights_instrumentation_key = None
+ self.container_registry_credentials = None
+ self.notebook_access_keys = None
+
+
+class ListWorkspaceQuotas(msrest.serialization.Model):
+ """The List WorkspaceQuotasByVMFamily operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of Workspace Quotas by VM Family.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota]
+ :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family.
+ Call ListNext() with this to fetch the next page of Workspace Quota information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ResourceQuota]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceQuotas, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class Model(msrest.serialization.Model):
+ """An Azure Machine Learning Model.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: The Model Id.
+ :type id: str
+ :param name: Required. The Model name.
+ :type name: str
+ :param framework: The Model framework.
+ :type framework: str
+ :param framework_version: The Model framework version.
+ :type framework_version: str
+ :param version: The Model version assigned by Model Management Service.
+ :type version: long
+ :param datasets: The list of datasets associated with the model.
+ :type datasets: list[~azure_machine_learning_workspaces.models.DatasetReference]
+ :param url: Required. The URL of the Model. Usually a SAS URL.
+ :type url: str
+    :param mime_type: Required. The MIME type of the Model content. For more details about MIME
+     types, see https://www.iana.org/assignments/media-types/media-types.xhtml.
+ :type mime_type: str
+ :param description: The Model description text.
+ :type description: str
+ :param created_time: The Model creation time (UTC).
+ :type created_time: ~datetime.datetime
+ :param modified_time: The Model last modified time (UTC).
+ :type modified_time: ~datetime.datetime
+    :param unpack: Indicates whether we need to unpack the Model during Docker image creation.
+ :type unpack: bool
+ :param parent_model_id: The Parent Model Id.
+ :type parent_model_id: str
+ :param run_id: The RunId that created this model.
+ :type run_id: str
+ :param experiment_name: The name of the experiment where this model was created.
+ :type experiment_name: str
+ :param kv_tags: The Model tag dictionary. Items are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The Model property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param derived_model_ids: Models derived from this model.
+ :type derived_model_ids: list[str]
+ :param sample_input_data: Sample Input Data for the Model. A reference to a dataset in the
+ workspace in the format aml://dataset/{datasetId}.
+ :type sample_input_data: str
+ :param sample_output_data: Sample Output Data for the Model. A reference to a dataset in the
+ workspace in the format aml://dataset/{datasetId}.
+ :type sample_output_data: str
+ :param resource_requirements: Resource requirements for the model.
+ :type resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'url': {'required': True},
+ 'mime_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'framework': {'key': 'framework', 'type': 'str'},
+ 'framework_version': {'key': 'frameworkVersion', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'long'},
+ 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'},
+ 'url': {'key': 'url', 'type': 'str'},
+ 'mime_type': {'key': 'mimeType', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+ 'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
+ 'unpack': {'key': 'unpack', 'type': 'bool'},
+ 'parent_model_id': {'key': 'parentModelId', 'type': 'str'},
+ 'run_id': {'key': 'runId', 'type': 'str'},
+ 'experiment_name': {'key': 'experimentName', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'derived_model_ids': {'key': 'derivedModelIds', 'type': '[str]'},
+ 'sample_input_data': {'key': 'sampleInputData', 'type': 'str'},
+ 'sample_output_data': {'key': 'sampleOutputData', 'type': 'str'},
+ 'resource_requirements': {'key': 'resourceRequirements', 'type': 'ContainerResourceRequirements'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Model, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.name = kwargs['name']
+ self.framework = kwargs.get('framework', None)
+ self.framework_version = kwargs.get('framework_version', None)
+ self.version = kwargs.get('version', None)
+ self.datasets = kwargs.get('datasets', None)
+ self.url = kwargs['url']
+ self.mime_type = kwargs['mime_type']
+ self.description = kwargs.get('description', None)
+ self.created_time = kwargs.get('created_time', None)
+ self.modified_time = kwargs.get('modified_time', None)
+ self.unpack = kwargs.get('unpack', None)
+ self.parent_model_id = kwargs.get('parent_model_id', None)
+ self.run_id = kwargs.get('run_id', None)
+ self.experiment_name = kwargs.get('experiment_name', None)
+ self.kv_tags = kwargs.get('kv_tags', None)
+ self.properties = kwargs.get('properties', None)
+ self.derived_model_ids = kwargs.get('derived_model_ids', None)
+ self.sample_input_data = kwargs.get('sample_input_data', None)
+ self.sample_output_data = kwargs.get('sample_output_data', None)
+ self.resource_requirements = kwargs.get('resource_requirements', None)
+
+
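+# Illustrative sketch (not generated code): building a minimal Model. ``name``,
+# ``url`` and ``mime_type`` are required and are read with ``kwargs['...']``, so
+# omitting any of them raises ``KeyError`` at construction time; every other
+# field is optional. All values below are made-up placeholders.
+def _example_build_model():
+    return Model(
+        name="sklearn-regression",  # required
+        url="https://example.blob.core.windows.net/models/model.pkl?<sas-token>",  # required
+        mime_type="application/octet-stream",  # required
+        description="Example regression model",
+        unpack=False,
+    )
+
+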
+class ModelDockerSection(msrest.serialization.Model):
+ """ModelDockerSection.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistry
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelDockerSection, self).__init__(**kwargs)
+ self.base_image = kwargs.get('base_image', None)
+ self.base_dockerfile = kwargs.get('base_dockerfile', None)
+ self.base_image_registry = kwargs.get('base_image_registry', None)
+
+
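+# Illustrative sketch (not generated code): ``base_image`` and ``base_dockerfile``
+# are mutually exclusive, so a section typically sets exactly one of them. The
+# image name below is a made-up placeholder.
+def _example_build_docker_section():
+    return ModelDockerSection(
+        base_image="mcr.microsoft.com/azureml/example-base:latest",
+    )
+
+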
+class ModelDockerSectionBaseImageRegistry(ContainerRegistry):
+ """Image registry that contains the base image.
+
+ :param address:
+ :type address: str
+ :param username:
+ :type username: str
+ :param password:
+ :type password: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelDockerSectionBaseImageRegistry, self).__init__(**kwargs)
+
+
+class ModelDockerSectionResponse(msrest.serialization.Model):
+ """ModelDockerSectionResponse.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistryResponse
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistryResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelDockerSectionResponse, self).__init__(**kwargs)
+ self.base_image = kwargs.get('base_image', None)
+ self.base_dockerfile = kwargs.get('base_dockerfile', None)
+ self.base_image_registry = kwargs.get('base_image_registry', None)
+
+
+class ModelDockerSectionResponseBaseImageRegistry(ContainerRegistryResponse):
+ """Image registry that contains the base image.
+
+ :param address:
+ :type address: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelDockerSectionResponseBaseImageRegistry, self).__init__(**kwargs)
+
+
+class ModelEnvironmentDefinitionDocker(ModelDockerSection):
+ """The definition of a Docker container.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistry
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionDocker, self).__init__(**kwargs)
+
+
+class ModelPythonSection(msrest.serialization.Model):
+ """ModelPythonSection.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelPythonSection, self).__init__(**kwargs)
+ self.interpreter_path = kwargs.get('interpreter_path', None)
+ self.user_managed_dependencies = kwargs.get('user_managed_dependencies', None)
+ self.conda_dependencies = kwargs.get('conda_dependencies', None)
+ self.base_conda_environment = kwargs.get('base_conda_environment', None)
+
+
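+# Illustrative sketch (not generated code): when ``user_managed_dependencies`` is
+# False the service builds the environment from ``conda_dependencies``; when it is
+# True the interpreter at ``interpreter_path`` is used as-is. The conda
+# specification below is a made-up minimal example.
+def _example_build_python_section(user_managed=False):
+    if user_managed:
+        return ModelPythonSection(interpreter_path="python", user_managed_dependencies=True)
+    return ModelPythonSection(
+        user_managed_dependencies=False,
+        conda_dependencies={
+            "name": "project_environment",
+            "dependencies": ["python=3.8", {"pip": ["scikit-learn"]}],
+        },
+    )
+
+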
+class ModelEnvironmentDefinitionPython(ModelPythonSection):
+ """Settings for a Python environment.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionPython, self).__init__(**kwargs)
+
+
+class RSection(msrest.serialization.Model):
+ """RSection.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+ :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackage]
+    :param custom_url_packages: The packages from custom URLs.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RSection, self).__init__(**kwargs)
+ self.r_version = kwargs.get('r_version', None)
+ self.user_managed = kwargs.get('user_managed', None)
+ self.rscript_path = kwargs.get('rscript_path', None)
+ self.snapshot_date = kwargs.get('snapshot_date', None)
+ self.cran_packages = kwargs.get('cran_packages', None)
+ self.git_hub_packages = kwargs.get('git_hub_packages', None)
+ self.custom_url_packages = kwargs.get('custom_url_packages', None)
+ self.bio_conductor_packages = kwargs.get('bio_conductor_packages', None)
+
+
+class ModelEnvironmentDefinitionR(RSection):
+    """Settings for an R environment.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+ :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackage]
+    :param custom_url_packages: The packages from custom URLs.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionR, self).__init__(**kwargs)
+
+
+class ModelEnvironmentDefinitionResponseDocker(ModelDockerSectionResponse):
+ """The definition of a Docker container.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistryResponse
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistryResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponseDocker, self).__init__(**kwargs)
+
+
+class ModelEnvironmentDefinitionResponsePython(ModelPythonSection):
+ """Settings for a Python environment.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponsePython, self).__init__(**kwargs)
+
+
+class RSectionResponse(msrest.serialization.Model):
+ """RSectionResponse.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+ :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackageResponse]
+    :param custom_url_packages: The packages from custom URLs.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackageResponse]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RSectionResponse, self).__init__(**kwargs)
+ self.r_version = kwargs.get('r_version', None)
+ self.user_managed = kwargs.get('user_managed', None)
+ self.rscript_path = kwargs.get('rscript_path', None)
+ self.snapshot_date = kwargs.get('snapshot_date', None)
+ self.cran_packages = kwargs.get('cran_packages', None)
+ self.git_hub_packages = kwargs.get('git_hub_packages', None)
+ self.custom_url_packages = kwargs.get('custom_url_packages', None)
+ self.bio_conductor_packages = kwargs.get('bio_conductor_packages', None)
+
+
+class ModelEnvironmentDefinitionResponseR(RSectionResponse):
+    """Settings for an R environment.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+ :param user_managed: Indicates whether the environment is managed by user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackageResponse]
+    :param custom_url_packages: The packages from custom URLs.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackageResponse]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponseR, self).__init__(**kwargs)
+
+
+class ModelSparkSection(msrest.serialization.Model):
+ """ModelSparkSection.
+
+ :param repositories: The list of spark repositories.
+ :type repositories: list[str]
+ :param packages: The Spark packages to use.
+ :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
+ :param precache_packages: Whether to precache the packages.
+ :type precache_packages: bool
+ """
+
+ _attribute_map = {
+ 'repositories': {'key': 'repositories', 'type': '[str]'},
+ 'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
+ 'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelSparkSection, self).__init__(**kwargs)
+ self.repositories = kwargs.get('repositories', None)
+ self.packages = kwargs.get('packages', None)
+ self.precache_packages = kwargs.get('precache_packages', None)
+
+
+class ModelEnvironmentDefinitionResponseSpark(ModelSparkSection):
+ """The configuration for a Spark environment.
+
+ :param repositories: The list of spark repositories.
+ :type repositories: list[str]
+ :param packages: The Spark packages to use.
+ :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
+ :param precache_packages: Whether to precache the packages.
+ :type precache_packages: bool
+ """
+
+ _attribute_map = {
+ 'repositories': {'key': 'repositories', 'type': '[str]'},
+ 'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
+ 'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponseSpark, self).__init__(**kwargs)
+
+
+class ModelEnvironmentDefinitionSpark(ModelSparkSection):
+ """The configuration for a Spark environment.
+
+ :param repositories: The list of spark repositories.
+ :type repositories: list[str]
+ :param packages: The Spark packages to use.
+ :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
+ :param precache_packages: Whether to precache the packages.
+ :type precache_packages: bool
+ """
+
+ _attribute_map = {
+ 'repositories': {'key': 'repositories', 'type': '[str]'},
+ 'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
+ 'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionSpark, self).__init__(**kwargs)
+
+
+class NodeStateCounts(msrest.serialization.Model):
+ """Counts of various compute node states on the amlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar idle_node_count: Number of compute nodes in idle state.
+ :vartype idle_node_count: int
+ :ivar running_node_count: Number of compute nodes which are running jobs.
+ :vartype running_node_count: int
+ :ivar preparing_node_count: Number of compute nodes which are being prepared.
+ :vartype preparing_node_count: int
+ :ivar unusable_node_count: Number of compute nodes which are in unusable state.
+ :vartype unusable_node_count: int
+ :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute.
+ :vartype leaving_node_count: int
+ :ivar preempted_node_count: Number of compute nodes which are in preempted state.
+ :vartype preempted_node_count: int
+ """
+
+ _validation = {
+ 'idle_node_count': {'readonly': True},
+ 'running_node_count': {'readonly': True},
+ 'preparing_node_count': {'readonly': True},
+ 'unusable_node_count': {'readonly': True},
+ 'leaving_node_count': {'readonly': True},
+ 'preempted_node_count': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
+ 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
+ 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
+ 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
+ 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
+ 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NodeStateCounts, self).__init__(**kwargs)
+ self.idle_node_count = None
+ self.running_node_count = None
+ self.preparing_node_count = None
+ self.unusable_node_count = None
+ self.leaving_node_count = None
+ self.preempted_node_count = None
+
+
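+# Illustrative sketch (not generated code): every attribute on NodeStateCounts is
+# readonly, so client-side construction leaves them ``None`` and real values only
+# appear when a server response is deserialized. The payload below is a made-up
+# example of the wire format; ``deserialize`` is the msrest base-class helper.
+def _example_deserialize_node_state_counts():
+    assert NodeStateCounts().idle_node_count is None  # readonly: ignored on send
+    return NodeStateCounts.deserialize(
+        {"idleNodeCount": 2, "runningNodeCount": 1, "preemptedNodeCount": 0}
+    )
+
+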
+class NotebookAccessTokenResult(msrest.serialization.Model):
+ """NotebookAccessTokenResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar notebook_resource_id:
+ :vartype notebook_resource_id: str
+ :ivar host_name:
+ :vartype host_name: str
+ :ivar public_dns:
+ :vartype public_dns: str
+ :ivar access_token:
+ :vartype access_token: str
+ :ivar token_type:
+ :vartype token_type: str
+ :ivar expires_in:
+ :vartype expires_in: int
+ :ivar refresh_token:
+ :vartype refresh_token: str
+ :ivar scope:
+ :vartype scope: str
+ """
+
+ _validation = {
+ 'notebook_resource_id': {'readonly': True},
+ 'host_name': {'readonly': True},
+ 'public_dns': {'readonly': True},
+ 'access_token': {'readonly': True},
+ 'token_type': {'readonly': True},
+ 'expires_in': {'readonly': True},
+ 'refresh_token': {'readonly': True},
+ 'scope': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'},
+ 'host_name': {'key': 'hostName', 'type': 'str'},
+ 'public_dns': {'key': 'publicDns', 'type': 'str'},
+ 'access_token': {'key': 'accessToken', 'type': 'str'},
+ 'token_type': {'key': 'tokenType', 'type': 'str'},
+ 'expires_in': {'key': 'expiresIn', 'type': 'int'},
+ 'refresh_token': {'key': 'refreshToken', 'type': 'str'},
+ 'scope': {'key': 'scope', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookAccessTokenResult, self).__init__(**kwargs)
+ self.notebook_resource_id = None
+ self.host_name = None
+ self.public_dns = None
+ self.access_token = None
+ self.token_type = None
+ self.expires_in = None
+ self.refresh_token = None
+ self.scope = None
+
+
+class NotebookPreparationError(msrest.serialization.Model):
+ """NotebookPreparationError.
+
+ :param error_message:
+ :type error_message: str
+ :param status_code:
+ :type status_code: int
+ """
+
+ _attribute_map = {
+ 'error_message': {'key': 'errorMessage', 'type': 'str'},
+ 'status_code': {'key': 'statusCode', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookPreparationError, self).__init__(**kwargs)
+ self.error_message = kwargs.get('error_message', None)
+ self.status_code = kwargs.get('status_code', None)
+
+
+class NotebookResourceInfo(msrest.serialization.Model):
+ """NotebookResourceInfo.
+
+ :param fqdn:
+ :type fqdn: str
+    :param resource_id: The data plane resourceId that is used to initialize the notebook
+     component.
+    :type resource_id: str
+    :param notebook_preparation_error: The error that occurs when preparing the notebook.
+ :type notebook_preparation_error:
+ ~azure_machine_learning_workspaces.models.NotebookPreparationError
+ """
+
+ _attribute_map = {
+ 'fqdn': {'key': 'fqdn', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookResourceInfo, self).__init__(**kwargs)
+ self.fqdn = kwargs.get('fqdn', None)
+ self.resource_id = kwargs.get('resource_id', None)
+ self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None)
+
+
+class Operation(msrest.serialization.Model):
+ """Azure Machine Learning workspace REST API operation.
+
+ :param name: Operation name: {provider}/{resource}/{operation}.
+ :type name: str
+ :param display: Display name of operation.
+ :type display: ~azure_machine_learning_workspaces.models.OperationDisplay
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'display': {'key': 'display', 'type': 'OperationDisplay'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Operation, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.display = kwargs.get('display', None)
+
+
+class OperationDisplay(msrest.serialization.Model):
+ """Display name of operation.
+
+ :param provider: The resource provider name: Microsoft.MachineLearningExperimentation.
+ :type provider: str
+ :param resource: The resource on which the operation is performed.
+ :type resource: str
+ :param operation: The operation that users can perform.
+ :type operation: str
+ :param description: The description for the operation.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'provider': {'key': 'provider', 'type': 'str'},
+ 'resource': {'key': 'resource', 'type': 'str'},
+ 'operation': {'key': 'operation', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(OperationDisplay, self).__init__(**kwargs)
+ self.provider = kwargs.get('provider', None)
+ self.resource = kwargs.get('resource', None)
+ self.operation = kwargs.get('operation', None)
+ self.description = kwargs.get('description', None)
+
+
+class OperationListResult(msrest.serialization.Model):
+ """An array of operations supported by the resource provider.
+
+ :param value: List of AML workspace operations supported by the AML workspace resource
+ provider.
+ :type value: list[~azure_machine_learning_workspaces.models.Operation]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Operation]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(OperationListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class PaginatedComputeResourcesList(msrest.serialization.Model):
+ """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope.
+
+ :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
+ :type value: list[~azure_machine_learning_workspaces.models.ComputeResource]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ComputeResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PaginatedComputeResourcesList, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
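+# Illustrative sketch (not generated code): pages link together through
+# ``next_link``; callers keep requesting pages until ``next_link`` is empty.
+# ``fetch_page`` stands in for whatever call retrieves a single page of
+# PaginatedComputeResourcesList and is not part of this module.
+def _example_iterate_compute_pages(fetch_page):
+    page = fetch_page(None)
+    while page is not None:
+        for compute in page.value or []:
+            yield compute
+        page = fetch_page(page.next_link) if page.next_link else None
+
+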
+class PaginatedServiceList(msrest.serialization.Model):
+ """Paginated list of Machine Learning service objects wrapped in ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: An array of Machine Learning service objects wrapped in ARM resource envelope.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ServiceResource]
+ :ivar next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ServiceResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PaginatedServiceList, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class PaginatedWorkspaceConnectionsList(msrest.serialization.Model):
+ """Paginated list of Workspace connection objects.
+
+ :param value: An array of Workspace connection objects.
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceConnection]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class Password(msrest.serialization.Model):
+ """Password.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name:
+ :vartype name: str
+ :ivar value:
+ :vartype value: str
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Password, self).__init__(**kwargs)
+ self.name = None
+ self.value = None
+
+
+class PersonalComputeInstanceSettings(msrest.serialization.Model):
+ """Settings for a personal compute instance.
+
+ :param assigned_user: A user explicitly assigned to a personal compute instance.
+ :type assigned_user: ~azure_machine_learning_workspaces.models.AssignedUser
+ """
+
+ _attribute_map = {
+ 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PersonalComputeInstanceSettings, self).__init__(**kwargs)
+ self.assigned_user = kwargs.get('assigned_user', None)
+
+
+class PrivateEndpoint(msrest.serialization.Model):
+ """The Private Endpoint resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The ARM identifier for Private Endpoint.
+ :vartype id: str
+ :ivar subnet_arm_id: The ARM identifier for Subnet resource that private endpoint links to.
+ :vartype subnet_arm_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'subnet_arm_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpoint, self).__init__(**kwargs)
+ self.id = None
+ self.subnet_arm_id = None
+
+
+class PrivateEndpointConnection(Resource):
+ """The Private Endpoint Connection resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :param private_endpoint: The resource of private end point.
+ :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint
+ :param private_link_service_connection_state: A collection of information about the state of
+ the connection between service consumer and provider.
+ :type private_link_service_connection_state:
+ ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState
+ :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
+ Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
+ 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpointConnection, self).__init__(**kwargs)
+ self.private_endpoint = kwargs.get('private_endpoint', None)
+ self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None)
+ self.provisioning_state = None
+
+
+class PrivateLinkResource(Resource):
+ """A private link resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar group_id: The private link resource group id.
+ :vartype group_id: str
+ :ivar required_members: The private link resource required member names.
+ :vartype required_members: list[str]
+    :param required_zone_names: The private link DNS zone names of the private link resource.
+ :type required_zone_names: list[str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'group_id': {'readonly': True},
+ 'required_members': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
+ 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResource, self).__init__(**kwargs)
+ self.group_id = None
+ self.required_members = None
+ self.required_zone_names = kwargs.get('required_zone_names', None)
+
+
+class PrivateLinkResourceListResult(msrest.serialization.Model):
+ """A list of private link resources.
+
+ :param value: Array of private link resources.
+ :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkResourceListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+
+
+class PrivateLinkServiceConnectionState(msrest.serialization.Model):
+ """A collection of information about the state of the connection between service consumer and provider.
+
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ :param description: The reason for approval/rejection of the connection.
+ :type description: str
+ :param actions_required: A message indicating if changes on the service provider require any
+ updates on the consumer.
+ :type actions_required: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+ self.status = kwargs.get('status', None)
+ self.description = kwargs.get('description', None)
+ self.actions_required = kwargs.get('actions_required', None)
+
+
+class QuotaBaseProperties(msrest.serialization.Model):
+ """The properties for Quota update or retrieval.
+
+ :param id: Specifies the resource ID.
+ :type id: str
+ :param type: Specifies the resource type.
+ :type type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :param unit: An enum describing the unit of quota measurement. Possible values include:
+ "Count".
+ :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(QuotaBaseProperties, self).__init__(**kwargs)
+ self.id = kwargs.get('id', None)
+ self.type = kwargs.get('type', None)
+ self.limit = kwargs.get('limit', None)
+ self.unit = kwargs.get('unit', None)
+
+
+class QuotaUpdateParameters(msrest.serialization.Model):
+ """Quota update parameters.
+
+    :param value: The list of quota properties to update.
+ :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties]
+ :param location: Region of workspace quota to be updated.
+ :type location: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'},
+ 'location': {'key': 'location', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(QuotaUpdateParameters, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.location = kwargs.get('location', None)
+
+
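+# Illustrative sketch (not generated code): a quota update wraps one
+# QuotaBaseProperties entry per VM-family quota. The ARM id, limit and location
+# below are made-up placeholders; "Count" is the documented quota unit.
+def _example_build_quota_update():
+    return QuotaUpdateParameters(
+        location="eastus",
+        value=[
+            QuotaBaseProperties(
+                id="<arm-id-of-the-vm-family-quota>",
+                limit=48,
+                unit="Count",
+            )
+        ],
+    )
+
+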
+class RCranPackage(msrest.serialization.Model):
+ """RCranPackage.
+
+ :param name: The package name.
+ :type name: str
+ :param repository: The repository name.
+ :type repository: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'repository': {'key': 'repository', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RCranPackage, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.repository = kwargs.get('repository', None)
+
+
+class RegistryListCredentialsResult(msrest.serialization.Model):
+ """RegistryListCredentialsResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location:
+ :vartype location: str
+ :ivar username:
+ :vartype username: str
+ :param passwords:
+ :type passwords: list[~azure_machine_learning_workspaces.models.Password]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'username': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'passwords': {'key': 'passwords', 'type': '[Password]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RegistryListCredentialsResult, self).__init__(**kwargs)
+ self.location = None
+ self.username = None
+ self.passwords = kwargs.get('passwords', None)
+
+
+class ResourceId(msrest.serialization.Model):
+ """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. The ID of the resource.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceId, self).__init__(**kwargs)
+ self.id = kwargs['id']
+
+
+class ResourceName(msrest.serialization.Model):
+ """The Resource Name.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class ResourceQuota(msrest.serialization.Model):
+ """The quota assigned to a resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar aml_workspace_location: Region of the AML workspace in the id.
+ :vartype aml_workspace_location: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar name: Name of the resource.
+ :vartype name: ~azure_machine_learning_workspaces.models.ResourceName
+ :ivar limit: The maximum permitted quota of the resource.
+ :vartype limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'aml_workspace_location': {'readonly': True},
+ 'type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'ResourceName'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceQuota, self).__init__(**kwargs)
+ self.id = None
+ self.aml_workspace_location = None
+ self.type = None
+ self.name = None
+ self.limit = None
+ self.unit = None
+
+
+class ResourceSkuLocationInfo(msrest.serialization.Model):
+ """ResourceSkuLocationInfo.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location: Location of the SKU.
+ :vartype location: str
+ :ivar zones: List of availability zones where the SKU is supported.
+ :vartype zones: list[str]
+ :ivar zone_details: Details of capabilities available to a SKU in specific zones.
+ :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'zones': {'readonly': True},
+ 'zone_details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'zones': {'key': 'zones', 'type': '[str]'},
+ 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuLocationInfo, self).__init__(**kwargs)
+ self.location = None
+ self.zones = None
+ self.zone_details = None
+
+
+class ResourceSkuZoneDetails(msrest.serialization.Model):
+ """Describes The zonal capabilities of a SKU.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The set of zones that the SKU is available in with the specified capabilities.
+ :vartype name: list[str]
+ :ivar capabilities: A list of capabilities that are available for the SKU in the specified list
+ of zones.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': '[str]'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuZoneDetails, self).__init__(**kwargs)
+ self.name = None
+ self.capabilities = None
+
+
+class Restriction(msrest.serialization.Model):
+ """The restriction because of which SKU cannot be used.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar type: The type of restrictions. As of now, the only possible value for this is location.
+    :vartype type: str
+    :ivar values: The value of restrictions. If the restriction type is set to location, this
+     would be the different locations where the SKU is restricted.
+    :vartype values: list[str]
+ :param reason_code: The reason for the restriction. Possible values include: "NotSpecified",
+ "NotAvailableForRegion", "NotAvailableForSubscription".
+ :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'values': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[str]'},
+ 'reason_code': {'key': 'reasonCode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Restriction, self).__init__(**kwargs)
+ self.type = None
+ self.values = None
+ self.reason_code = kwargs.get('reason_code', None)
+
+
+class RGitHubPackage(msrest.serialization.Model):
+ """RGitHubPackage.
+
+ :param repository: Repository address in the format username/repo[/subdir][@ref|#pull].
+ :type repository: str
+ :param auth_token: Personal access token to install from a private repo.
+ :type auth_token: str
+ """
+
+ _attribute_map = {
+ 'repository': {'key': 'repository', 'type': 'str'},
+ 'auth_token': {'key': 'authToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RGitHubPackage, self).__init__(**kwargs)
+ self.repository = kwargs.get('repository', None)
+ self.auth_token = kwargs.get('auth_token', None)
+
+
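+# Illustrative sketch (not generated code): an R environment that pins an MRAN
+# snapshot and pulls one CRAN package and one GitHub package. The version,
+# snapshot date and package names are made-up placeholders.
+def _example_build_r_section():
+    return RSection(
+        r_version="3.6.3",
+        snapshot_date="2019-04-17",
+        cran_packages=[RCranPackage(name="dplyr")],
+        git_hub_packages=[RGitHubPackage(repository="example-user/example-repo")],
+    )
+
+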
+class RGitHubPackageResponse(msrest.serialization.Model):
+ """RGitHubPackageResponse.
+
+ :param repository: Repository address in the format username/repo[/subdir][@ref|#pull].
+ :type repository: str
+ """
+
+ _attribute_map = {
+ 'repository': {'key': 'repository', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(RGitHubPackageResponse, self).__init__(**kwargs)
+ self.repository = kwargs.get('repository', None)
+
+
+class ScaleSettings(msrest.serialization.Model):
+    """Scale settings for AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_node_count: Required. Max number of nodes to use.
+ :type max_node_count: int
+ :param min_node_count: Min number of nodes to use.
+ :type min_node_count: int
+    :param node_idle_time_before_scale_down: Node idle time before scaling down amlCompute. This
+     string needs to be in the RFC format (serialized on the wire as an ISO 8601 duration).
+ :type node_idle_time_before_scale_down: ~datetime.timedelta
+ """
+
+ _validation = {
+ 'max_node_count': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScaleSettings, self).__init__(**kwargs)
+ self.max_node_count = kwargs['max_node_count']
+ self.min_node_count = kwargs.get('min_node_count', 0)
+ self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None)
+
+
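+# Illustrative sketch (not generated code): ``max_node_count`` is required,
+# ``min_node_count`` defaults to 0, and the idle timeout is a
+# ``datetime.timedelta`` (serialized as an ISO 8601 duration). The node counts
+# and timeout below are made-up placeholders.
+def _example_build_scale_settings():
+    import datetime
+    return ScaleSettings(
+        max_node_count=4,
+        min_node_count=0,
+        node_idle_time_before_scale_down=datetime.timedelta(minutes=30),
+    )
+
+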
+class ScriptReference(msrest.serialization.Model):
+ """Script reference.
+
+ :param script_source: The storage source of the script: inline, workspace.
+ :type script_source: str
+ :param script_data: The location of scripts in the mounted volume.
+ :type script_data: str
+ :param script_arguments: Optional command line arguments passed to the script to run.
+ :type script_arguments: str
+ :param timeout: Optional time period passed to timeout command.
+ :type timeout: str
+ """
+
+ _attribute_map = {
+ 'script_source': {'key': 'scriptSource', 'type': 'str'},
+ 'script_data': {'key': 'scriptData', 'type': 'str'},
+ 'script_arguments': {'key': 'scriptArguments', 'type': 'str'},
+ 'timeout': {'key': 'timeout', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScriptReference, self).__init__(**kwargs)
+ self.script_source = kwargs.get('script_source', None)
+ self.script_data = kwargs.get('script_data', None)
+ self.script_arguments = kwargs.get('script_arguments', None)
+ self.timeout = kwargs.get('timeout', None)
+
+
+class ScriptsToExecute(msrest.serialization.Model):
+ """Customized setup scripts.
+
+ :param startup_script: Script that's run every time the machine starts.
+ :type startup_script: ~azure_machine_learning_workspaces.models.ScriptReference
+    :param creation_script: Script that's run only once during provisioning of the compute.
+ :type creation_script: ~azure_machine_learning_workspaces.models.ScriptReference
+ """
+
+ _attribute_map = {
+ 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'},
+ 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ScriptsToExecute, self).__init__(**kwargs)
+ self.startup_script = kwargs.get('startup_script', None)
+ self.creation_script = kwargs.get('creation_script', None)
+
+
+class ServiceManagedResourcesSettings(msrest.serialization.Model):
+ """ServiceManagedResourcesSettings.
+
+ :param cosmos_db: The settings for the service managed cosmosdb account.
+ :type cosmos_db: ~azure_machine_learning_workspaces.models.CosmosDbSettings
+ """
+
+ _attribute_map = {
+ 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServiceManagedResourcesSettings, self).__init__(**kwargs)
+ self.cosmos_db = kwargs.get('cosmos_db', None)
+
+
+class ServicePrincipalCredentials(msrest.serialization.Model):
+ """Service principal credentials.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param client_id: Required. Client Id.
+ :type client_id: str
+ :param client_secret: Required. Client secret.
+ :type client_secret: str
+ """
+
+ _validation = {
+ 'client_id': {'required': True},
+ 'client_secret': {'required': True},
+ }
+
+ _attribute_map = {
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServicePrincipalCredentials, self).__init__(**kwargs)
+ self.client_id = kwargs['client_id']
+ self.client_secret = kwargs['client_secret']
+
+
+class ServiceResource(Resource):
+ """Machine Learning service object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :param properties: Service properties.
+ :type properties: ~azure_machine_learning_workspaces.models.ServiceResponseBase
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'properties': {'key': 'properties', 'type': 'ServiceResponseBase'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServiceResource, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+class ServiceResponseBaseError(MachineLearningServiceError):
+ """The error details.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar error: The error response.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ """
+
+ _validation = {
+ 'error': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServiceResponseBaseError, self).__init__(**kwargs)
+
+
+class SetupScripts(msrest.serialization.Model):
+ """Details of customized scripts to execute for setting up the cluster.
+
+ :param scripts: Customized setup scripts.
+ :type scripts: ~azure_machine_learning_workspaces.models.ScriptsToExecute
+ """
+
+ _attribute_map = {
+ 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SetupScripts, self).__init__(**kwargs)
+ self.scripts = kwargs.get('scripts', None)
+
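+ # Illustrative usage sketch (not part of the generated SDK): composing the
+ # setup-script models defined above; the script path and timeout values are
+ # assumptions, not values taken from the service.
+ #
+ #     setup = SetupScripts(
+ #         scripts=ScriptsToExecute(
+ #             startup_script=ScriptReference(
+ #                 script_source='workspace',
+ #                 script_data='scripts/startup.sh',
+ #                 timeout='5m',
+ #             ),
+ #         ),
+ #     )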
+
+class SharedPrivateLinkResource(msrest.serialization.Model):
+ """SharedPrivateLinkResource.
+
+ :param name: Unique name of the private link.
+ :type name: str
+ :param private_link_resource_id: The resource id that private link links to.
+ :type private_link_resource_id: str
+ :param group_id: The private link resource group id.
+ :type group_id: str
+ :param request_message: Request message.
+ :type request_message: str
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+ 'status': {'key': 'properties.status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SharedPrivateLinkResource, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.private_link_resource_id = kwargs.get('private_link_resource_id', None)
+ self.group_id = kwargs.get('group_id', None)
+ self.request_message = kwargs.get('request_message', None)
+ self.status = kwargs.get('status', None)
+
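+ # Illustrative usage sketch (not part of the generated SDK): a shared private
+ # link request; the name, group id and ARM resource id are placeholders, and
+ # 'Approved' is one of the documented status values above.
+ #
+ #     link = SharedPrivateLinkResource(
+ #         name='mylink',
+ #         private_link_resource_id='<arm-id-of-private-link-resource>',
+ #         group_id='<group-id>',
+ #         request_message='Approval requested',
+ #         status='Approved',
+ #     )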
+
+class Sku(msrest.serialization.Model):
+ """Sku of the resource.
+
+ :param name: Name of the sku.
+ :type name: str
+ :param tier: Tier of the sku like Basic or Enterprise.
+ :type tier: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Sku, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.tier = kwargs.get('tier', None)
+
+
+class SkuCapability(msrest.serialization.Model):
+ """Features/user capabilities associated with the sku.
+
+ :param name: Capability/Feature ID.
+ :type name: str
+ :param value: Details about the feature/capability.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SkuCapability, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.value = kwargs.get('value', None)
+
+
+class SkuListResult(msrest.serialization.Model):
+ """List of skus with features.
+
+ :param value:
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku]
+ :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this
+ URI to fetch the next page of Workspace Skus.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceSku]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SkuListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class SparkMavenPackage(msrest.serialization.Model):
+ """SparkMavenPackage.
+
+ :param group:
+ :type group: str
+ :param artifact:
+ :type artifact: str
+ :param version:
+ :type version: str
+ """
+
+ _attribute_map = {
+ 'group': {'key': 'group', 'type': 'str'},
+ 'artifact': {'key': 'artifact', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SparkMavenPackage, self).__init__(**kwargs)
+ self.group = kwargs.get('group', None)
+ self.artifact = kwargs.get('artifact', None)
+ self.version = kwargs.get('version', None)
+
+
+class SslConfiguration(msrest.serialization.Model):
+ """The ssl configuration for scoring.
+
+ :param status: Enable or disable ssl for scoring. Possible values include: "Disabled",
+ "Enabled", "Auto".
+ :type status: str or ~azure_machine_learning_workspaces.models.SslConfigurationStatus
+ :param cert: Cert data.
+ :type cert: str
+ :param key: Key data.
+ :type key: str
+ :param cname: CNAME of the cert.
+ :type cname: str
+ :param leaf_domain_label: Leaf domain label of public endpoint.
+ :type leaf_domain_label: str
+ :param overwrite_existing_domain: Indicates whether to overwrite existing domain label.
+ :type overwrite_existing_domain: bool
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'cert': {'key': 'cert', 'type': 'str'},
+ 'key': {'key': 'key', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'},
+ 'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SslConfiguration, self).__init__(**kwargs)
+ self.status = kwargs.get('status', None)
+ self.cert = kwargs.get('cert', None)
+ self.key = kwargs.get('key', None)
+ self.cname = kwargs.get('cname', None)
+ self.leaf_domain_label = kwargs.get('leaf_domain_label', None)
+ self.overwrite_existing_domain = kwargs.get('overwrite_existing_domain', None)
+
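+ # Illustrative usage sketch (not part of the generated SDK): enabling SSL for
+ # scoring with an auto-generated certificate through a leaf domain label; the
+ # label value is an assumption.
+ #
+ #     ssl = SslConfiguration(
+ #         status='Auto',
+ #         leaf_domain_label='myendpoint',
+ #         overwrite_existing_domain=True,
+ #     )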
+
+class SynapseSparkPoolProperties(msrest.serialization.Model):
+ """Properties specific to Synapse Spark pools.
+
+ :param properties: Synapse Spark pool properties.
+ :type properties:
+ ~azure_machine_learning_workspaces.models.SynapseSparkPoolPropertiesautogenerated
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'SynapseSparkPoolPropertiesautogenerated'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SynapseSparkPoolProperties, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+
+
+class SynapseSpark(Compute, SynapseSparkPoolProperties):
+ """A SynapseSpark compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param properties: Synapse Spark pool properties.
+ :type properties:
+ ~azure_machine_learning_workspaces.models.SynapseSparkPoolPropertiesautogenerated
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'SynapseSparkPoolPropertiesautogenerated'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SynapseSpark, self).__init__(**kwargs)
+ self.properties = kwargs.get('properties', None)
+ self.compute_type = 'SynapseSpark'  # type: str
+ self.compute_location = kwargs.get('compute_location', None)
+ self.provisioning_state = None
+ self.description = kwargs.get('description', None)
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = kwargs.get('resource_id', None)
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+ self.disable_local_auth = kwargs.get('disable_local_auth', None)
+
+
+class SynapseSparkPoolPropertiesautogenerated(msrest.serialization.Model):
+ """AKS properties.
+
+ :param auto_scale_properties: Auto scale properties.
+ :type auto_scale_properties: ~azure_machine_learning_workspaces.models.AutoScaleProperties
+ :param auto_pause_properties: Auto pause properties.
+ :type auto_pause_properties: ~azure_machine_learning_workspaces.models.AutoPauseProperties
+ :param spark_version: Spark version.
+ :type spark_version: str
+ :param node_count: The number of compute nodes currently assigned to the compute.
+ :type node_count: int
+ :param node_size: Node size.
+ :type node_size: str
+ :param node_size_family: Node size family.
+ :type node_size_family: str
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+ :param resource_group: Name of the resource group in which workspace is located.
+ :type resource_group: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param pool_name: Pool name.
+ :type pool_name: str
+ """
+
+ _attribute_map = {
+ 'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'},
+ 'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'},
+ 'spark_version': {'key': 'sparkVersion', 'type': 'str'},
+ 'node_count': {'key': 'nodeCount', 'type': 'int'},
+ 'node_size': {'key': 'nodeSize', 'type': 'str'},
+ 'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'},
+ 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+ 'resource_group': {'key': 'resourceGroup', 'type': 'str'},
+ 'workspace_name': {'key': 'workspaceName', 'type': 'str'},
+ 'pool_name': {'key': 'poolName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SynapseSparkPoolPropertiesautogenerated, self).__init__(**kwargs)
+ self.auto_scale_properties = kwargs.get('auto_scale_properties', None)
+ self.auto_pause_properties = kwargs.get('auto_pause_properties', None)
+ self.spark_version = kwargs.get('spark_version', None)
+ self.node_count = kwargs.get('node_count', None)
+ self.node_size = kwargs.get('node_size', None)
+ self.node_size_family = kwargs.get('node_size_family', None)
+ self.subscription_id = kwargs.get('subscription_id', None)
+ self.resource_group = kwargs.get('resource_group', None)
+ self.workspace_name = kwargs.get('workspace_name', None)
+ self.pool_name = kwargs.get('pool_name', None)
+
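+ # Illustrative usage sketch (not part of the generated SDK): attaching an
+ # existing Synapse Spark pool as compute; the ARM resource id and pool details
+ # below are placeholders, not real values.
+ #
+ #     compute = SynapseSpark(
+ #         resource_id='<arm-id-of-synapse-spark-pool>',
+ #         properties=SynapseSparkPoolPropertiesautogenerated(
+ #             spark_version='3.1',
+ #             node_count=3,
+ #             node_size='Medium',
+ #             node_size_family='MemoryOptimized',
+ #             pool_name='<pool-name>',
+ #         ),
+ #     )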
+
+class SystemData(msrest.serialization.Model):
+ """Read only system data.
+
+ :param created_by: An identifier for the identity that created the resource.
+ :type created_by: str
+ :param created_by_type: The type of identity that created the resource. Possible values
+ include: "User", "Application", "ManagedIdentity", "Key".
+ :type created_by_type: str or ~azure_machine_learning_workspaces.models.IdentityType
+ :param created_at: The timestamp of resource creation (UTC).
+ :type created_at: ~datetime.datetime
+ :param last_modified_by: An identifier for the identity that last modified the resource.
+ :type last_modified_by: str
+ :param last_modified_by_type: The type of identity that last modified the resource. Possible
+ values include: "User", "Application", "ManagedIdentity", "Key".
+ :type last_modified_by_type: str or ~azure_machine_learning_workspaces.models.IdentityType
+ :param last_modified_at: The timestamp of resource last modification (UTC).
+ :type last_modified_at: ~datetime.datetime
+ """
+
+ _attribute_map = {
+ 'created_by': {'key': 'createdBy', 'type': 'str'},
+ 'created_by_type': {'key': 'createdByType', 'type': 'str'},
+ 'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
+ 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
+ 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
+ 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemData, self).__init__(**kwargs)
+ self.created_by = kwargs.get('created_by', None)
+ self.created_by_type = kwargs.get('created_by_type', None)
+ self.created_at = kwargs.get('created_at', None)
+ self.last_modified_by = kwargs.get('last_modified_by', None)
+ self.last_modified_by_type = kwargs.get('last_modified_by_type', None)
+ self.last_modified_at = kwargs.get('last_modified_at', None)
+
+
+class SystemService(msrest.serialization.Model):
+ """A system service running on a compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar system_service_type: The type of this system service.
+ :vartype system_service_type: str
+ :ivar public_ip_address: Public IP address.
+ :vartype public_ip_address: str
+ :ivar version: The version for this type.
+ :vartype version: str
+ """
+
+ _validation = {
+ 'system_service_type': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'version': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemService, self).__init__(**kwargs)
+ self.system_service_type = None
+ self.public_ip_address = None
+ self.version = None
+
+
+class UpdateWorkspaceQuotas(msrest.serialization.Model):
+ """The properties for update Quota response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ :param status: Status of update workspace quota. Possible values include: "Undefined",
+ "Success", "Failure", "InvalidQuotaBelowClusterMinimum",
+ "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku",
+ "OperationNotEnabledForRegion".
+ :type status: str or ~azure_machine_learning_workspaces.models.Status
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.limit = kwargs.get('limit', None)
+ self.unit = None
+ self.status = kwargs.get('status', None)
+
+
+class UpdateWorkspaceQuotasResult(msrest.serialization.Model):
+ """The result of update workspace quota.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of workspace quota update result.
+ :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas]
+ :ivar next_link: The URI to fetch the next page of workspace quota update result. Call
+ ListNext() with this to fetch the next page of Workspace Quota update result.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class Usage(msrest.serialization.Model):
+ """Describes AML Resource Usage.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar aml_workspace_location: Region of the AML workspace in the id.
+ :vartype aml_workspace_location: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit
+ :ivar current_value: The current usage of the resource.
+ :vartype current_value: long
+ :ivar limit: The maximum permitted usage of the resource.
+ :vartype limit: long
+ :ivar name: The name of the type of usage.
+ :vartype name: ~azure_machine_learning_workspaces.models.UsageName
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'aml_workspace_location': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ 'current_value': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'name': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'current_value': {'key': 'currentValue', 'type': 'long'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'UsageName'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Usage, self).__init__(**kwargs)
+ self.id = None
+ self.aml_workspace_location = None
+ self.type = None
+ self.unit = None
+ self.current_value = None
+ self.limit = None
+ self.name = None
+
+
+class UsageName(msrest.serialization.Model):
+ """The Usage Names.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UsageName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+ """Settings for user account that gets created on each on the nodes of a compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param admin_user_name: Required. Name of the administrator user account which can be used to
+ SSH to nodes.
+ :type admin_user_name: str
+ :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+ :type admin_user_ssh_public_key: str
+ :param admin_user_password: Password of the administrator user account.
+ :type admin_user_password: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+ 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAccountCredentials, self).__init__(**kwargs)
+ self.admin_user_name = kwargs['admin_user_name']
+ self.admin_user_ssh_public_key = kwargs.get('admin_user_ssh_public_key', None)
+ self.admin_user_password = kwargs.get('admin_user_password', None)
+
+
+class UserAssignedIdentity(msrest.serialization.Model):
+ """User Assigned Identity.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of the user assigned identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of the user assigned identity.
+ :vartype tenant_id: str
+ :ivar client_id: The clientId(aka appId) of the user assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'client_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAssignedIdentity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.client_id = None
+
+
+class VirtualMachine(Compute):
+ """A Machine Learning compute based on Azure Virtual Machines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt-out of local authentication and ensure customers can use only
+ MSI and AAD exclusively for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.VirtualMachineProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachine, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.properties = kwargs.get('properties', None)
+
+
+class VirtualMachineImage(msrest.serialization.Model):
+ """Virtual Machine image for Windows AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. Virtual Machine image path.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineImage, self).__init__(**kwargs)
+ self.id = kwargs['id']
+
+
+class VirtualMachineProperties(msrest.serialization.Model):
+ """VirtualMachineProperties.
+
+ :param virtual_machine_size: Virtual Machine size.
+ :type virtual_machine_size: str
+ :param ssh_port: Port open for ssh connections.
+ :type ssh_port: int
+ :param address: Public IP address of the virtual machine.
+ :type address: str
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ :param is_notebook_instance_compute: Indicates whether this compute will be used for running
+ notebooks.
+ :type is_notebook_instance_compute: bool
+ """
+
+ _attribute_map = {
+ 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineProperties, self).__init__(**kwargs)
+ self.virtual_machine_size = kwargs.get('virtual_machine_size', None)
+ self.ssh_port = kwargs.get('ssh_port', None)
+ self.address = kwargs.get('address', None)
+ self.administrator_account = kwargs.get('administrator_account', None)
+ self.is_notebook_instance_compute = kwargs.get('is_notebook_instance_compute', None)
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSecrets, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.administrator_account = kwargs.get('administrator_account', None)
+
+
+class VirtualMachineSize(msrest.serialization.Model):
+ """Describes the properties of a VM size.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The name of the virtual machine size.
+ :vartype name: str
+ :ivar family: The family name of the virtual machine size.
+ :vartype family: str
+ :ivar v_cp_us: The number of vCPUs supported by the virtual machine size.
+ :vartype v_cp_us: int
+ :ivar gpus: The number of GPUs supported by the virtual machine size.
+ :vartype gpus: int
+ :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+ :vartype os_vhd_size_mb: int
+ :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+ size.
+ :vartype max_resource_volume_mb: int
+ :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+ :vartype memory_gb: float
+ :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+ :vartype low_priority_capable: bool
+ :ivar premium_io: Specifies if the virtual machine size supports premium IO.
+ :vartype premium_io: bool
+ :param estimated_vm_prices: The estimated price information for using a VM.
+ :type estimated_vm_prices: ~azure_machine_learning_workspaces.models.EstimatedVmPrices
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'family': {'readonly': True},
+ 'v_cp_us': {'readonly': True},
+ 'gpus': {'readonly': True},
+ 'os_vhd_size_mb': {'readonly': True},
+ 'max_resource_volume_mb': {'readonly': True},
+ 'memory_gb': {'readonly': True},
+ 'low_priority_capable': {'readonly': True},
+ 'premium_io': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'family': {'key': 'family', 'type': 'str'},
+ 'v_cp_us': {'key': 'vCPUs', 'type': 'int'},
+ 'gpus': {'key': 'gpus', 'type': 'int'},
+ 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'},
+ 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'},
+ 'memory_gb': {'key': 'memoryGB', 'type': 'float'},
+ 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'},
+ 'premium_io': {'key': 'premiumIO', 'type': 'bool'},
+ 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVmPrices'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSize, self).__init__(**kwargs)
+ self.name = None
+ self.family = None
+ self.v_cp_us = None
+ self.gpus = None
+ self.os_vhd_size_mb = None
+ self.max_resource_volume_mb = None
+ self.memory_gb = None
+ self.low_priority_capable = None
+ self.premium_io = None
+ self.estimated_vm_prices = kwargs.get('estimated_vm_prices', None)
+
+
+class VirtualMachineSizeListResult(msrest.serialization.Model):
+ """The List Virtual Machine size operation response.
+
+ :param aml_compute: The list of virtual machine sizes supported by AmlCompute.
+ :type aml_compute: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+ """
+
+ _attribute_map = {
+ 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+ self.aml_compute = kwargs.get('aml_compute', None)
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+ """Admin credentials for virtual machine.
+
+ :param username: Username of admin account.
+ :type username: str
+ :param password: Password of admin account.
+ :type password: str
+ :param public_key_data: Public key data.
+ :type public_key_data: str
+ :param private_key_data: Private key data.
+ :type private_key_data: str
+ """
+
+ _attribute_map = {
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+ 'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+ self.username = kwargs.get('username', None)
+ self.password = kwargs.get('password', None)
+ self.public_key_data = kwargs.get('public_key_data', None)
+ self.private_key_data = kwargs.get('private_key_data', None)
+
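+ # Illustrative usage sketch (not part of the generated SDK): attaching an
+ # existing virtual machine with SSH credentials; the resource id, address and
+ # credential values are placeholders.
+ #
+ #     vm = VirtualMachine(
+ #         resource_id='<arm-id-of-virtual-machine>',
+ #         properties=VirtualMachineProperties(
+ #             ssh_port=22,
+ #             address='10.0.0.4',
+ #             administrator_account=VirtualMachineSshCredentials(
+ #                 username='azureuser',
+ #                 password='<password>',
+ #             ),
+ #         ),
+ #     )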
+
+class Workspace(Resource):
+ """An object that represents a machine learning workspace.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar workspace_id: The immutable id associated with this workspace.
+ :vartype workspace_id: str
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace. This name is mutable.
+ :type friendly_name: str
+ :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+ :param discovery_url: Url for the discovery service to identify regional endpoints for machine
+ learning experimentation services.
+ :type discovery_url: str
+ :ivar provisioning_state: The current deployment state of workspace resource. The
+ provisioningState is to indicate states for resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param encryption: The encryption settings of Azure ML workspace.
+ :type encryption: ~azure_machine_learning_workspaces.models.EncryptionProperty
+ :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+ collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by
+ the workspace RP in the customer subscription if the workspace is a CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+ access when behind VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ :ivar notebook_info: The notebook info of Azure ML workspace.
+ :vartype notebook_info: ~azure_machine_learning_workspaces.models.NotebookResourceInfo
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ :ivar tenant_id: The tenant id associated with this workspace.
+ :vartype tenant_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'workspace_id': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'service_provisioned_resource_group': {'readonly': True},
+ 'private_link_count': {'readonly': True},
+ 'private_endpoint_connections': {'readonly': True},
+ 'notebook_info': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'key_vault': {'key': 'properties.keyVault', 'type': 'str'},
+ 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'},
+ 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'},
+ 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'},
+ 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
+ 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'},
+ 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'},
+ 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'},
+ 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
+ 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'},
+ 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'},
+ 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+ 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+ 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Workspace, self).__init__(**kwargs)
+ self.workspace_id = None
+ self.description = kwargs.get('description', None)
+ self.friendly_name = kwargs.get('friendly_name', None)
+ self.key_vault = kwargs.get('key_vault', None)
+ self.application_insights = kwargs.get('application_insights', None)
+ self.container_registry = kwargs.get('container_registry', None)
+ self.storage_account = kwargs.get('storage_account', None)
+ self.discovery_url = kwargs.get('discovery_url', None)
+ self.provisioning_state = None
+ self.encryption = kwargs.get('encryption', None)
+ self.hbi_workspace = kwargs.get('hbi_workspace', False)
+ self.service_provisioned_resource_group = None
+ self.private_link_count = None
+ self.image_build_compute = kwargs.get('image_build_compute', None)
+ self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', False)
+ self.private_endpoint_connections = None
+ self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None)
+ self.notebook_info = None
+ self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None)
+ self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None)
+ self.tenant_id = None
+
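+ # Illustrative usage sketch (not part of the generated SDK): a minimal
+ # Workspace payload; the location and dependent-resource ARM ids are
+ # placeholders, and only mutable/creatable fields are set.
+ #
+ #     ws = Workspace(
+ #         location='<region>',
+ #         friendly_name='My workspace',
+ #         key_vault='<arm-id-of-key-vault>',
+ #         application_insights='<arm-id-of-application-insights>',
+ #         storage_account='<arm-id-of-storage-account>',
+ #         hbi_workspace=False,
+ #     )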
+
+class WorkspaceConnection(msrest.serialization.Model):
+ """Workspace connection.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: ResourceId of the workspace connection.
+ :vartype id: str
+ :ivar name: Friendly name of the workspace connection.
+ :vartype name: str
+ :ivar type: Resource type of workspace connection.
+ :vartype type: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+ :param value_format: format for the workspace connection value. Possible values include:
+ "JSON".
+ :type value_format: str or ~azure_machine_learning_workspaces.models.ValueFormat
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ 'value_format': {'key': 'properties.valueFormat', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceConnection, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.category = kwargs.get('category', None)
+ self.target = kwargs.get('target', None)
+ self.auth_type = kwargs.get('auth_type', None)
+ self.value = kwargs.get('value', None)
+ self.value_format = kwargs.get('value_format', None)
+
+
+class WorkspaceConnectionDto(msrest.serialization.Model):
+ """object used for creating workspace connection.
+
+ :param name: Friendly name of the workspace connection.
+ :type name: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+ :param value_format: format for the workspace connection value. Possible values include:
+ "JSON".
+ :type value_format: str or ~azure_machine_learning_workspaces.models.ValueFormat
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ 'value_format': {'key': 'properties.valueFormat', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceConnectionDto, self).__init__(**kwargs)
+ self.name = kwargs.get('name', None)
+ self.category = kwargs.get('category', None)
+ self.target = kwargs.get('target', None)
+ self.auth_type = kwargs.get('auth_type', None)
+ self.value = kwargs.get('value', None)
+ self.value_format = kwargs.get('value_format', None)
+
+
+class WorkspaceListResult(msrest.serialization.Model):
+ """The result of a request to list machine learning workspaces.
+
+ :param value: The list of machine learning workspaces. Since this list may be incomplete, the
+ nextLink field should be used to request the next list of machine learning workspaces.
+ :type value: list[~azure_machine_learning_workspaces.models.Workspace]
+ :param next_link: The URI that can be used to request the next list of machine learning
+ workspaces.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Workspace]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceListResult, self).__init__(**kwargs)
+ self.value = kwargs.get('value', None)
+ self.next_link = kwargs.get('next_link', None)
+
+
+class WorkspaceSku(msrest.serialization.Model):
+ """Describes Workspace Sku details and features.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar locations: The set of locations that the SKU is available. This will be supported and
+ registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.).
+ :vartype locations: list[str]
+ :ivar location_info: A list of locations and availability zones in those locations where the
+ SKU is available.
+ :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo]
+ :ivar tier: Sku Tier like Basic or Enterprise.
+ :vartype tier: str
+ :ivar resource_type:
+ :vartype resource_type: str
+ :ivar name:
+ :vartype name: str
+ :ivar capabilities: List of features/user capabilities associated with the sku.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ :param restrictions: The restrictions because of which SKU cannot be used. This is empty if
+ there are no restrictions.
+ :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction]
+ """
+
+ _validation = {
+ 'locations': {'readonly': True},
+ 'location_info': {'readonly': True},
+ 'tier': {'readonly': True},
+ 'resource_type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'locations': {'key': 'locations', 'type': '[str]'},
+ 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceSku, self).__init__(**kwargs)
+ self.locations = None
+ self.location_info = None
+ self.tier = None
+ self.resource_type = None
+ self.name = None
+ self.capabilities = None
+ self.restrictions = kwargs.get('restrictions', None)
+
+
+class WorkspaceUpdateParameters(msrest.serialization.Model):
+ """The parameters for updating a machine learning workspace.
+
+ :param tags: A set of tags. The resource tags for the machine learning workspace.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace.
+ :type friendly_name: str
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+ 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceUpdateParameters, self).__init__(**kwargs)
+ self.tags = kwargs.get('tags', None)
+ self.sku = kwargs.get('sku', None)
+ self.identity = kwargs.get('identity', None)
+ self.description = kwargs.get('description', None)
+ self.friendly_name = kwargs.get('friendly_name', None)
+ self.image_build_compute = kwargs.get('image_build_compute', None)
+ self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None)
+ self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None)
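+ # Illustrative usage sketch (not part of the generated SDK): a PATCH body that
+ # only changes the mutable description, friendly name and tags; the values are
+ # placeholders.
+ #
+ #     update = WorkspaceUpdateParameters(
+ #         tags={'env': 'dev'},
+ #         description='Updated description',
+ #         friendly_name='Updated friendly name',
+ #     )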
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py
new file mode 100644
index 00000000000..1a6ddbe068c
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py
@@ -0,0 +1,7846 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._azure_machine_learning_workspaces_enums import *
+
+
+class CreateServiceRequest(msrest.serialization.Model):
+ """The base class for creating a service.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AciServiceCreateRequest, CreateEndpointVariantRequest.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+ :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'ACI': 'AciServiceCreateRequest', 'Custom': 'CreateEndpointVariantRequest'}
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ kv_tags: Optional[Dict[str, str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ keys: Optional["AuthKeys"] = None,
+ environment_image_request: Optional["EnvironmentImageRequest"] = None,
+ location: Optional[str] = None,
+ **kwargs
+ ):
+ super(CreateServiceRequest, self).__init__(**kwargs)
+ self.description = description
+ self.kv_tags = kv_tags
+ self.properties = properties
+ self.keys = keys
+ self.compute_type = None # type: Optional[str]
+ self.environment_image_request = environment_image_request
+ self.location = location
+
+
+class AciServiceCreateRequest(CreateServiceRequest):
+ """AciServiceCreateRequest.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+ :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :param auth_enabled: Whether or not authentication is enabled on the service.
+ :type auth_enabled: bool
+ :param ssl_enabled: Whether or not SSL is enabled.
+ :type ssl_enabled: bool
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param ssl_certificate: The public SSL certificate in PEM format to use if SSL is enabled.
+ :type ssl_certificate: str
+ :param ssl_key: The public SSL key in PEM format for the certificate.
+ :type ssl_key: str
+ :param cname: The CNAME for the service.
+ :type cname: str
+ :param dns_name_label: The DNS label for the service.
+ :type dns_name_label: str
+ :param vnet_configuration: The virtual network configuration.
+ :type vnet_configuration: ~azure_machine_learning_workspaces.models.VnetConfiguration
+ :param encryption_properties: The encryption properties.
+ :type encryption_properties: ~azure_machine_learning_workspaces.models.EncryptionProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
+ 'ssl_key': {'key': 'sslKey', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'dns_name_label': {'key': 'dnsNameLabel', 'type': 'str'},
+ 'vnet_configuration': {'key': 'vnetConfiguration', 'type': 'VnetConfiguration'},
+ 'encryption_properties': {'key': 'encryptionProperties', 'type': 'EncryptionProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ kv_tags: Optional[Dict[str, str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ keys: Optional["AuthKeys"] = None,
+ environment_image_request: Optional["EnvironmentImageRequest"] = None,
+ location: Optional[str] = None,
+ container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+ auth_enabled: Optional[bool] = False,
+ ssl_enabled: Optional[bool] = False,
+ app_insights_enabled: Optional[bool] = False,
+ data_collection: Optional["ModelDataCollection"] = None,
+ ssl_certificate: Optional[str] = None,
+ ssl_key: Optional[str] = None,
+ cname: Optional[str] = None,
+ dns_name_label: Optional[str] = None,
+ vnet_configuration: Optional["VnetConfiguration"] = None,
+ encryption_properties: Optional["EncryptionProperties"] = None,
+ **kwargs
+ ):
+ super(AciServiceCreateRequest, self).__init__(description=description, kv_tags=kv_tags, properties=properties, keys=keys, environment_image_request=environment_image_request, location=location, **kwargs)
+ self.compute_type = 'ACI' # type: str
+ self.container_resource_requirements = container_resource_requirements
+ self.auth_enabled = auth_enabled
+ self.ssl_enabled = ssl_enabled
+ self.app_insights_enabled = app_insights_enabled
+ self.data_collection = data_collection
+ self.ssl_certificate = ssl_certificate
+ self.ssl_key = ssl_key
+ self.cname = cname
+ self.dns_name_label = dns_name_label
+ self.vnet_configuration = vnet_configuration
+ self.encryption_properties = encryption_properties
+
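+
+# Illustrative sketch (not produced by the code generator): building an ACI
+# create request by hand. Every value below is a hypothetical placeholder;
+# the constructor pins ``compute_type`` to "ACI", which serializes to the
+# wire key ``computeType``.
+def _example_aci_create_request():  # pragma: no cover
+    request = AciServiceCreateRequest(
+        description="sample ACI web service",
+        kv_tags={"team": "demo"},
+        auth_enabled=True,
+        dns_name_label="sample-aci",
+    )
+    return request.serialize()  # wire-format dict, e.g. {"computeType": "ACI", ...}
+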
+
+class ModelDataCollection(msrest.serialization.Model):
+ """The Model data collection properties.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ event_hub_enabled: Optional[bool] = None,
+ storage_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(ModelDataCollection, self).__init__(**kwargs)
+ self.event_hub_enabled = event_hub_enabled
+ self.storage_enabled = storage_enabled
+
+
+class AciServiceCreateRequestDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ event_hub_enabled: Optional[bool] = None,
+ storage_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestDataCollection, self).__init__(event_hub_enabled=event_hub_enabled, storage_enabled=storage_enabled, **kwargs)
+
+
+class EncryptionProperties(msrest.serialization.Model):
+ """EncryptionProperties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. The vault base URL.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vault_base_url: str,
+ key_name: str,
+ key_version: str,
+ **kwargs
+ ):
+ super(EncryptionProperties, self).__init__(**kwargs)
+ self.vault_base_url = vault_base_url
+ self.key_name = key_name
+ self.key_version = key_version
+
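+
+# Illustrative sketch (not produced by the code generator): the three fields
+# are required keyword arguments, mirroring ``_validation`` above. The vault
+# URL, key name and key version are placeholders only.
+def _example_encryption_properties():  # pragma: no cover
+    return EncryptionProperties(
+        vault_base_url="https://examplekv.vault.azure.net/",
+        key_name="example-key",
+        key_version="0000000000000000",
+    )
+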
+
+class AciServiceCreateRequestEncryptionProperties(EncryptionProperties):
+ """The encryption properties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. The vault base URL.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vault_base_url: str,
+ key_name: str,
+ key_version: str,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestEncryptionProperties, self).__init__(vault_base_url=vault_base_url, key_name=key_name, key_version=key_version, **kwargs)
+
+
+class VnetConfiguration(msrest.serialization.Model):
+ """VnetConfiguration.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vnet_name: Optional[str] = None,
+ subnet_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(VnetConfiguration, self).__init__(**kwargs)
+ self.vnet_name = vnet_name
+ self.subnet_name = subnet_name
+
+
+class AciServiceCreateRequestVnetConfiguration(VnetConfiguration):
+ """The virtual network configuration.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vnet_name: Optional[str] = None,
+ subnet_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(AciServiceCreateRequestVnetConfiguration, self).__init__(vnet_name=vnet_name, subnet_name=subnet_name, **kwargs)
+
+
+class ServiceResponseBase(msrest.serialization.Model):
+ """The base service response. The correct inherited response based on computeType will be returned (ex. ACIServiceResponse).
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AciServiceResponse, AksVariantResponse.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The service description.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :ivar state: The current state of the service. Possible values include: "Transitioning",
+ "Healthy", "Unhealthy", "Failed", "Unschedulable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+ :ivar error: The error details.
+ :vartype error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+ :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param deployment_type: The deployment type for the service. Possible values include:
+ "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+ :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+ """
+
+ _validation = {
+ 'state': {'readonly': True},
+ 'error': {'readonly': True},
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'ACI': 'AciServiceResponse', 'Custom': 'AksVariantResponse'}
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ kv_tags: Optional[Dict[str, str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ deployment_type: Optional[Union[str, "DeploymentType"]] = None,
+ **kwargs
+ ):
+ super(ServiceResponseBase, self).__init__(**kwargs)
+ self.description = description
+ self.kv_tags = kv_tags
+ self.properties = properties
+ self.state = None
+ self.error = None
+ self.compute_type = None # type: Optional[str]
+ self.deployment_type = deployment_type
+
+
+class AciServiceResponse(ServiceResponseBase):
+ """The response for an ACI service.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The service description.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :ivar state: The current state of the service. Possible values include: "Transitioning",
+ "Healthy", "Unhealthy", "Failed", "Unschedulable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+ :ivar error: The error details.
+ :vartype error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+ :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param deployment_type: The deployment type for the service. Possible values include:
+ "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+ :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :ivar scoring_uri: The Uri for sending scoring requests.
+ :vartype scoring_uri: str
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param auth_enabled: Whether or not authentication is enabled on the service.
+ :type auth_enabled: bool
+ :param ssl_enabled: Whether or not SSL is enabled.
+ :type ssl_enabled: bool
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param ssl_certificate: The public SSL certificate in PEM format to use if SSL is enabled.
+ :type ssl_certificate: str
+ :param ssl_key: The public SSL key in PEM format for the certificate.
+ :type ssl_key: str
+ :param cname: The CNAME for the service.
+ :type cname: str
+ :param public_ip: The public IP address for the service.
+ :type public_ip: str
+ :param public_fqdn: The public FQDN for the service.
+ :type public_fqdn: str
+ :ivar swagger_uri: The Uri for sending swagger requests.
+ :vartype swagger_uri: str
+ :ivar model_config_map: Details on the models and configurations.
+ :vartype model_config_map: dict[str, object]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment_image_request: The Environment, models and assets used for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageResponse
+ :param vnet_configuration: The virtual network configuration.
+ :type vnet_configuration: ~azure_machine_learning_workspaces.models.VnetConfiguration
+ :param encryption_properties: The encryption properties.
+ :type encryption_properties: ~azure_machine_learning_workspaces.models.EncryptionProperties
+ """
+
+ _validation = {
+ 'state': {'readonly': True},
+ 'error': {'readonly': True},
+ 'compute_type': {'required': True},
+ 'scoring_uri': {'readonly': True},
+ 'swagger_uri': {'readonly': True},
+ 'model_config_map': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'ssl_enabled': {'key': 'sslEnabled', 'type': 'bool'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'ssl_certificate': {'key': 'sslCertificate', 'type': 'str'},
+ 'ssl_key': {'key': 'sslKey', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'public_ip': {'key': 'publicIp', 'type': 'str'},
+ 'public_fqdn': {'key': 'publicFqdn', 'type': 'str'},
+ 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
+ 'model_config_map': {'key': 'modelConfigMap', 'type': '{object}'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageResponse'},
+ 'vnet_configuration': {'key': 'vnetConfiguration', 'type': 'VnetConfiguration'},
+ 'encryption_properties': {'key': 'encryptionProperties', 'type': 'EncryptionProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ kv_tags: Optional[Dict[str, str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ deployment_type: Optional[Union[str, "DeploymentType"]] = None,
+ container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+ location: Optional[str] = None,
+ auth_enabled: Optional[bool] = None,
+ ssl_enabled: Optional[bool] = None,
+ app_insights_enabled: Optional[bool] = None,
+ data_collection: Optional["ModelDataCollection"] = None,
+ ssl_certificate: Optional[str] = None,
+ ssl_key: Optional[str] = None,
+ cname: Optional[str] = None,
+ public_ip: Optional[str] = None,
+ public_fqdn: Optional[str] = None,
+ models: Optional[List["Model"]] = None,
+ environment_image_request: Optional["EnvironmentImageResponse"] = None,
+ vnet_configuration: Optional["VnetConfiguration"] = None,
+ encryption_properties: Optional["EncryptionProperties"] = None,
+ **kwargs
+ ):
+ super(AciServiceResponse, self).__init__(description=description, kv_tags=kv_tags, properties=properties, deployment_type=deployment_type, **kwargs)
+ self.compute_type = 'ACI' # type: str
+ self.container_resource_requirements = container_resource_requirements
+ self.scoring_uri = None
+ self.location = location
+ self.auth_enabled = auth_enabled
+ self.ssl_enabled = ssl_enabled
+ self.app_insights_enabled = app_insights_enabled
+ self.data_collection = data_collection
+ self.ssl_certificate = ssl_certificate
+ self.ssl_key = ssl_key
+ self.cname = cname
+ self.public_ip = public_ip
+ self.public_fqdn = public_fqdn
+ self.swagger_uri = None
+ self.model_config_map = None
+ self.models = models
+ self.environment_image_request = environment_image_request
+ self.vnet_configuration = vnet_configuration
+ self.encryption_properties = encryption_properties
+
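+
+# Illustrative sketch (not produced by the code generator): read-only members
+# such as ``state`` and ``scoring_uri`` are never sent to the service; they
+# are only filled when a response payload is deserialized. The payload below
+# is a hypothetical, trimmed-down server response.
+def _example_read_aci_response():  # pragma: no cover
+    payload = {
+        "computeType": "ACI",
+        "state": "Healthy",
+        "scoringUri": "http://sample-aci.eastus.azurecontainer.io/score",
+    }
+    response = ServiceResponseBase.deserialize(payload)
+    return response.state, response.scoring_uri
+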
+
+class AciServiceResponseDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ event_hub_enabled: Optional[bool] = None,
+ storage_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AciServiceResponseDataCollection, self).__init__(event_hub_enabled=event_hub_enabled, storage_enabled=storage_enabled, **kwargs)
+
+
+class AciServiceResponseEncryptionProperties(EncryptionProperties):
+ """The encryption properties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param vault_base_url: Required. The vault base URL.
+ :type vault_base_url: str
+ :param key_name: Required. Encryption Key name.
+ :type key_name: str
+ :param key_version: Required. Encryption Key Version.
+ :type key_version: str
+ """
+
+ _validation = {
+ 'vault_base_url': {'required': True},
+ 'key_name': {'required': True},
+ 'key_version': {'required': True},
+ }
+
+ _attribute_map = {
+ 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'},
+ 'key_name': {'key': 'keyName', 'type': 'str'},
+ 'key_version': {'key': 'keyVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vault_base_url: str,
+ key_name: str,
+ key_version: str,
+ **kwargs
+ ):
+ super(AciServiceResponseEncryptionProperties, self).__init__(vault_base_url=vault_base_url, key_name=key_name, key_version=key_version, **kwargs)
+
+
+class EnvironmentImageResponse(msrest.serialization.Model):
+ """Request to create a Docker image based on Environment.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the Azure ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+ :param environment_reference: The unique identifying details of the Azure ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ driver_program: Optional[str] = None,
+ assets: Optional[List["ImageAsset"]] = None,
+ model_ids: Optional[List[str]] = None,
+ models: Optional[List["Model"]] = None,
+ environment: Optional["ModelEnvironmentDefinitionResponse"] = None,
+ environment_reference: Optional["EnvironmentReference"] = None,
+ **kwargs
+ ):
+ super(EnvironmentImageResponse, self).__init__(**kwargs)
+ self.driver_program = driver_program
+ self.assets = assets
+ self.model_ids = model_ids
+ self.models = models
+ self.environment = environment
+ self.environment_reference = environment_reference
+
+
+class AciServiceResponseEnvironmentImageRequest(EnvironmentImageResponse):
+ """The Environment, models and assets used for inferencing.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the Azure ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+ :param environment_reference: The unique identifying details of the Azure ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ driver_program: Optional[str] = None,
+ assets: Optional[List["ImageAsset"]] = None,
+ model_ids: Optional[List[str]] = None,
+ models: Optional[List["Model"]] = None,
+ environment: Optional["ModelEnvironmentDefinitionResponse"] = None,
+ environment_reference: Optional["EnvironmentReference"] = None,
+ **kwargs
+ ):
+ super(AciServiceResponseEnvironmentImageRequest, self).__init__(driver_program=driver_program, assets=assets, model_ids=model_ids, models=models, environment=environment, environment_reference=environment_reference, **kwargs)
+
+
+class AciServiceResponseVnetConfiguration(VnetConfiguration):
+ """The virtual network configuration.
+
+ :param vnet_name: The name of the virtual network.
+ :type vnet_name: str
+ :param subnet_name: The name of the virtual network subnet.
+ :type subnet_name: str
+ """
+
+ _attribute_map = {
+ 'vnet_name': {'key': 'vnetName', 'type': 'str'},
+ 'subnet_name': {'key': 'subnetName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vnet_name: Optional[str] = None,
+ subnet_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(AciServiceResponseVnetConfiguration, self).__init__(vnet_name=vnet_name, subnet_name=subnet_name, **kwargs)
+
+
+class Compute(msrest.serialization.Model):
+ """Machine Learning compute object.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, SynapseSpark, VirtualMachine.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provisioning state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and brought
+ from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt out of local authentication and ensure customers use only MSI
+ and AAD for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'SynapseSpark': 'SynapseSpark', 'VirtualMachine': 'VirtualMachine'}
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ **kwargs
+ ):
+ super(Compute, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.compute_location = compute_location
+ self.provisioning_state = None
+ self.description = description
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = resource_id
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+ self.disable_local_auth = disable_local_auth
+
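+
+# Illustrative sketch (not produced by the code generator): ``Compute`` is
+# polymorphic on ``computeType``, so a payload whose discriminator is "AKS"
+# deserializes into the ``Aks`` subclass defined below. The payload is a
+# hypothetical fragment of a GET response.
+def _example_classify_compute():  # pragma: no cover
+    payload = {"computeType": "AKS", "description": "attached AKS cluster"}
+    compute = Compute.deserialize(payload)
+    return type(compute).__name__  # "Aks"
+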
+
+class Aks(Compute):
+ """A Machine Learning compute based on AKS.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provisioning state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and brought
+ from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt out of local authentication and ensure customers use only MSI
+ and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties: AKS properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AksProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["AksProperties"] = None,
+ **kwargs
+ ):
+ super(Aks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.properties = properties
+
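+
+# Illustrative sketch (not produced by the code generator): attaching an
+# existing AKS cluster is modelled by an ``Aks`` compute whose ``resource_id``
+# points at the cluster. The resource ID and property values are placeholders.
+def _example_aks_compute():  # pragma: no cover
+    return Aks(
+        description="attached AKS cluster",
+        resource_id=(
+            "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/rg"
+            "/providers/Microsoft.ContainerService/managedClusters/sample-aks"
+        ),
+        properties=AksProperties(agent_count=3, agent_vm_size="Standard_D3_v2"),
+    )
+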
+
+class ComputeSecrets(msrest.serialization.Model):
+ """Secrets related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+
+
+class AksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param user_kube_config: Content of the user kubeconfig file that can be used to connect to the
+ Kubernetes cluster.
+ :type user_kube_config: str
+ :param admin_kube_config: Content of the admin kubeconfig file that can be used to connect to
+ the Kubernetes cluster.
+ :type admin_kube_config: str
+ :param image_pull_secret_name: Image registry pull secret.
+ :type image_pull_secret_name: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ user_kube_config: Optional[str] = None,
+ admin_kube_config: Optional[str] = None,
+ image_pull_secret_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.user_kube_config = user_kube_config
+ self.admin_kube_config = admin_kube_config
+ self.image_pull_secret_name = image_pull_secret_name
+
+
+class AksNetworkingConfiguration(msrest.serialization.Model):
+ """Advance configuration for AKS networking.
+
+ :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet_id: str
+ :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
+ not overlap with any Subnet IP ranges.
+ :type service_cidr: str
+ :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
+ the Kubernetes service address range specified in serviceCidr.
+ :type dns_service_ip: str
+ :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
+ must not overlap with any Subnet IP ranges or the Kubernetes service address range.
+ :type docker_bridge_cidr: str
+ """
+
+ _validation = {
+ 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
+ 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ }
+
+ _attribute_map = {
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
+ 'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
+ 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
+ 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ subnet_id: Optional[str] = None,
+ service_cidr: Optional[str] = None,
+ dns_service_ip: Optional[str] = None,
+ docker_bridge_cidr: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksNetworkingConfiguration, self).__init__(**kwargs)
+ self.subnet_id = subnet_id
+ self.service_cidr = service_cidr
+ self.dns_service_ip = dns_service_ip
+ self.docker_bridge_cidr = docker_bridge_cidr
+
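+
+# Illustrative sketch (not produced by the code generator): the regex patterns
+# in ``_validation`` above document the expected CIDR/IP formats. The ranges
+# below are common AKS defaults and are placeholders only.
+def _example_aks_networking():  # pragma: no cover
+    return AksNetworkingConfiguration(
+        service_cidr="10.0.0.0/16",
+        dns_service_ip="10.0.0.10",
+        docker_bridge_cidr="172.17.0.1/16",
+    )
+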
+
+class AksProperties(msrest.serialization.Model):
+ """AKS properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param cluster_fqdn: Cluster fully qualified domain name.
+ :type cluster_fqdn: str
+ :ivar system_services: System services.
+ :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
+ :param agent_count: Number of agents.
+ :type agent_count: int
+ :param agent_vm_size: Agent virtual machine size.
+ :type agent_vm_size: str
+ :param cluster_purpose: Intended usage of the cluster. Possible values include: "FastProd",
+ "DenseProd", "DevTest". Default value: "FastProd".
+ :type cluster_purpose: str or ~azure_machine_learning_workspaces.models.ClusterPurpose
+ :param ssl_configuration: SSL configuration.
+ :type ssl_configuration: ~azure_machine_learning_workspaces.models.SslConfiguration
+ :param aks_networking_configuration: AKS networking configuration for vnet.
+ :type aks_networking_configuration:
+ ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration
+ :param load_balancer_type: Load Balancer Type. Possible values include: "PublicIp",
+ "InternalLoadBalancer". Default value: "PublicIp".
+ :type load_balancer_type: str or ~azure_machine_learning_workspaces.models.LoadBalancerType
+ :param load_balancer_subnet: Load Balancer Subnet.
+ :type load_balancer_subnet: str
+ """
+
+ _validation = {
+ 'system_services': {'readonly': True},
+ 'agent_count': {'minimum': 0},
+ }
+
+ _attribute_map = {
+ 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'},
+ 'system_services': {'key': 'systemServices', 'type': '[SystemService]'},
+ 'agent_count': {'key': 'agentCount', 'type': 'int'},
+ 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'},
+ 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'},
+ 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'},
+ 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'},
+ 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'},
+ 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ cluster_fqdn: Optional[str] = None,
+ agent_count: Optional[int] = None,
+ agent_vm_size: Optional[str] = None,
+ cluster_purpose: Optional[Union[str, "ClusterPurpose"]] = "FastProd",
+ ssl_configuration: Optional["SslConfiguration"] = None,
+ aks_networking_configuration: Optional["AksNetworkingConfiguration"] = None,
+ load_balancer_type: Optional[Union[str, "LoadBalancerType"]] = "PublicIp",
+ load_balancer_subnet: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksProperties, self).__init__(**kwargs)
+ self.cluster_fqdn = cluster_fqdn
+ self.system_services = None
+ self.agent_count = agent_count
+ self.agent_vm_size = agent_vm_size
+ self.cluster_purpose = cluster_purpose
+ self.ssl_configuration = ssl_configuration
+ self.aks_networking_configuration = aks_networking_configuration
+ self.load_balancer_type = load_balancer_type
+ self.load_balancer_subnet = load_balancer_subnet
+
+
+class AksReplicaStatus(msrest.serialization.Model):
+ """AksReplicaStatus.
+
+ :param desired_replicas: The desired number of replicas.
+ :type desired_replicas: int
+ :param updated_replicas: The number of updated replicas.
+ :type updated_replicas: int
+ :param available_replicas: The number of available replicas.
+ :type available_replicas: int
+ :param error: The error details.
+ :type error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+ """
+
+ _attribute_map = {
+ 'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
+ 'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
+ 'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ }
+
+ def __init__(
+ self,
+ *,
+ desired_replicas: Optional[int] = None,
+ updated_replicas: Optional[int] = None,
+ available_replicas: Optional[int] = None,
+ error: Optional["MachineLearningServiceError"] = None,
+ **kwargs
+ ):
+ super(AksReplicaStatus, self).__init__(**kwargs)
+ self.desired_replicas = desired_replicas
+ self.updated_replicas = updated_replicas
+ self.available_replicas = available_replicas
+ self.error = error
+
+
+class MachineLearningServiceError(msrest.serialization.Model):
+ """Wrapper for error response to follow ARM guidelines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar error: The error response.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ """
+
+ _validation = {
+ 'error': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(MachineLearningServiceError, self).__init__(**kwargs)
+ self.error = None
+
+
+class AksReplicaStatusError(MachineLearningServiceError):
+ """The error details.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar error: The error response.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ """
+
+ _validation = {
+ 'error': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AksReplicaStatusError, self).__init__(**kwargs)
+
+
+class CreateEndpointVariantRequest(CreateServiceRequest):
+ """The Variant properties.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksServiceCreateRequest.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+ :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param is_default: Is this the default variant.
+ :type is_default: bool
+ :param traffic_percentile: The amount of traffic the variant receives.
+ :type traffic_percentile: float
+ :param type: The type of the variant. Possible values include: "Control", "Treatment".
+ :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksServiceCreateRequest'}
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ kv_tags: Optional[Dict[str, str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ keys: Optional["AuthKeys"] = None,
+ environment_image_request: Optional["EnvironmentImageRequest"] = None,
+ location: Optional[str] = None,
+ is_default: Optional[bool] = None,
+ traffic_percentile: Optional[float] = None,
+ type: Optional[Union[str, "VariantType"]] = None,
+ **kwargs
+ ):
+ super(CreateEndpointVariantRequest, self).__init__(description=description, kv_tags=kv_tags, properties=properties, keys=keys, environment_image_request=environment_image_request, location=location, **kwargs)
+ self.compute_type = 'Custom' # type: str
+ self.is_default = is_default
+ self.traffic_percentile = traffic_percentile
+ self.type = type
+
+
+class AksServiceCreateRequest(CreateEndpointVariantRequest):
+ """The request to create an AKS service.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The description of the service.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service properties dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param keys: The authentication keys.
+ :type keys: ~azure_machine_learning_workspaces.models.AuthKeys
+ :param compute_type: Required. The compute environment type for the service. Constant filled by
+ server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param environment_image_request: The Environment, models and assets needed for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageRequest
+ :param location: The name of the Azure location/region.
+ :type location: str
+ :param is_default: Is this the default variant.
+ :type is_default: bool
+ :param traffic_percentile: The amount of traffic the variant receives.
+ :type traffic_percentile: float
+ :param type: The type of the variant. Possible values include: "Control", "Treatment".
+ :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+ :param num_replicas: The number of replicas on the cluster.
+ :type num_replicas: int
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param compute_name: The name of the compute resource.
+ :type compute_name: str
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param auto_scaler: The auto scaler properties.
+ :type auto_scaler: ~azure_machine_learning_workspaces.models.AutoScaler
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :param max_concurrent_requests_per_container: The maximum number of concurrent requests per
+ container.
+ :type max_concurrent_requests_per_container: int
+ :param max_queue_wait_ms: Maximum time a request will wait in the queue (in milliseconds).
+ After this time, the service will return 503 (Service Unavailable).
+ :type max_queue_wait_ms: int
+ :param namespace: Kubernetes namespace for the service.
+ :type namespace: str
+ :param scoring_timeout_ms: The scoring timeout in milliseconds.
+ :type scoring_timeout_ms: int
+ :param auth_enabled: Whether or not authentication is enabled.
+ :type auth_enabled: bool
+ :param liveness_probe_requirements: The liveness probe requirements.
+ :type liveness_probe_requirements:
+ ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+ :param aad_auth_enabled: Whether or not AAD authentication is enabled.
+ :type aad_auth_enabled: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'keys': {'key': 'keys', 'type': 'AuthKeys'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageRequest'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'num_replicas': {'key': 'numReplicas', 'type': 'int'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'compute_name': {'key': 'computeName', 'type': 'str'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
+ 'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+ 'namespace': {'key': 'namespace', 'type': 'str'},
+ 'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+ 'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ kv_tags: Optional[Dict[str, str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ keys: Optional["AuthKeys"] = None,
+ environment_image_request: Optional["EnvironmentImageRequest"] = None,
+ location: Optional[str] = None,
+ is_default: Optional[bool] = None,
+ traffic_percentile: Optional[float] = None,
+ type: Optional[Union[str, "VariantType"]] = None,
+ num_replicas: Optional[int] = None,
+ data_collection: Optional["ModelDataCollection"] = None,
+ compute_name: Optional[str] = None,
+ app_insights_enabled: Optional[bool] = None,
+ auto_scaler: Optional["AutoScaler"] = None,
+ container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+ max_concurrent_requests_per_container: Optional[int] = None,
+ max_queue_wait_ms: Optional[int] = None,
+ namespace: Optional[str] = None,
+ scoring_timeout_ms: Optional[int] = None,
+ auth_enabled: Optional[bool] = None,
+ liveness_probe_requirements: Optional["LivenessProbeRequirements"] = None,
+ aad_auth_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AksServiceCreateRequest, self).__init__(description=description, kv_tags=kv_tags, properties=properties, keys=keys, environment_image_request=environment_image_request, location=location, is_default=is_default, traffic_percentile=traffic_percentile, type=type, **kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.num_replicas = num_replicas
+ self.data_collection = data_collection
+ self.compute_name = compute_name
+ self.app_insights_enabled = app_insights_enabled
+ self.auto_scaler = auto_scaler
+ self.container_resource_requirements = container_resource_requirements
+ self.max_concurrent_requests_per_container = max_concurrent_requests_per_container
+ self.max_queue_wait_ms = max_queue_wait_ms
+ self.namespace = namespace
+ self.scoring_timeout_ms = scoring_timeout_ms
+ self.auth_enabled = auth_enabled
+ self.liveness_probe_requirements = liveness_probe_requirements
+ self.aad_auth_enabled = aad_auth_enabled
+
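+
+# Illustrative sketch (not produced by the code generator): a minimal AKS
+# deployment request combining the scaling and probe models defined below.
+# All names and numbers are hypothetical placeholders.
+def _example_aks_create_request():  # pragma: no cover
+    return AksServiceCreateRequest(
+        compute_name="sample-aks-compute",
+        num_replicas=2,
+        auto_scaler=AutoScaler(autoscale_enabled=True, min_replicas=1, max_replicas=4),
+        liveness_probe_requirements=LivenessProbeRequirements(period_seconds=10, timeout_seconds=2),
+        scoring_timeout_ms=60000,
+        auth_enabled=True,
+    )
+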
+
+class AutoScaler(msrest.serialization.Model):
+ """The Auto Scaler properties.
+
+ :param autoscale_enabled: Option to enable/disable auto scaling.
+ :type autoscale_enabled: bool
+ :param min_replicas: The minimum number of replicas to scale down to.
+ :type min_replicas: int
+ :param max_replicas: The maximum number of replicas in the cluster.
+ :type max_replicas: int
+ :param target_utilization: The target utilization percentage to use for determining whether to
+ scale the cluster.
+ :type target_utilization: int
+ :param refresh_period_in_seconds: The number of seconds to wait between auto scale updates.
+ :type refresh_period_in_seconds: int
+ """
+
+ _attribute_map = {
+ 'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+ 'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+ 'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+ 'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+ 'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ autoscale_enabled: Optional[bool] = None,
+ min_replicas: Optional[int] = None,
+ max_replicas: Optional[int] = None,
+ target_utilization: Optional[int] = None,
+ refresh_period_in_seconds: Optional[int] = None,
+ **kwargs
+ ):
+ super(AutoScaler, self).__init__(**kwargs)
+ self.autoscale_enabled = autoscale_enabled
+ self.min_replicas = min_replicas
+ self.max_replicas = max_replicas
+ self.target_utilization = target_utilization
+ self.refresh_period_in_seconds = refresh_period_in_seconds
+
+
+class AksServiceCreateRequestAutoScaler(AutoScaler):
+ """The auto scaler properties.
+
+ :param autoscale_enabled: Option to enable/disable auto scaling.
+ :type autoscale_enabled: bool
+ :param min_replicas: The minimum number of replicas to scale down to.
+ :type min_replicas: int
+ :param max_replicas: The maximum number of replicas in the cluster.
+ :type max_replicas: int
+ :param target_utilization: The target utilization percentage to use for determining whether to
+ scale the cluster.
+ :type target_utilization: int
+ :param refresh_period_in_seconds: The number of seconds to wait between auto scale updates.
+ :type refresh_period_in_seconds: int
+ """
+
+ _attribute_map = {
+ 'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+ 'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+ 'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+ 'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+ 'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ autoscale_enabled: Optional[bool] = None,
+ min_replicas: Optional[int] = None,
+ max_replicas: Optional[int] = None,
+ target_utilization: Optional[int] = None,
+ refresh_period_in_seconds: Optional[int] = None,
+ **kwargs
+ ):
+ super(AksServiceCreateRequestAutoScaler, self).__init__(autoscale_enabled=autoscale_enabled, min_replicas=min_replicas, max_replicas=max_replicas, target_utilization=target_utilization, refresh_period_in_seconds=refresh_period_in_seconds, **kwargs)
+
+
+class AksServiceCreateRequestDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ event_hub_enabled: Optional[bool] = None,
+ storage_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AksServiceCreateRequestDataCollection, self).__init__(event_hub_enabled=event_hub_enabled, storage_enabled=storage_enabled, **kwargs)
+
+
+class LivenessProbeRequirements(msrest.serialization.Model):
+ """The liveness probe requirements.
+
+ :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+ :type failure_threshold: int
+ :param success_threshold: The number of successful probes before returning a healthy status.
+ :type success_threshold: int
+ :param timeout_seconds: The probe timeout in seconds.
+ :type timeout_seconds: int
+ :param period_seconds: The length of time between probes in seconds.
+ :type period_seconds: int
+ :param initial_delay_seconds: The delay before the first probe in seconds.
+ :type initial_delay_seconds: int
+ """
+
+ _attribute_map = {
+ 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+ 'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+ 'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+ 'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+ 'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ failure_threshold: Optional[int] = None,
+ success_threshold: Optional[int] = None,
+ timeout_seconds: Optional[int] = None,
+ period_seconds: Optional[int] = None,
+ initial_delay_seconds: Optional[int] = None,
+ **kwargs
+ ):
+ super(LivenessProbeRequirements, self).__init__(**kwargs)
+ self.failure_threshold = failure_threshold
+ self.success_threshold = success_threshold
+ self.timeout_seconds = timeout_seconds
+ self.period_seconds = period_seconds
+ self.initial_delay_seconds = initial_delay_seconds
+
+
+class AksServiceCreateRequestLivenessProbeRequirements(LivenessProbeRequirements):
+ """The liveness probe requirements.
+
+ :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+ :type failure_threshold: int
+ :param success_threshold: The number of successful probes before returning a healthy status.
+ :type success_threshold: int
+ :param timeout_seconds: The probe timeout in seconds.
+ :type timeout_seconds: int
+ :param period_seconds: The length of time between probes in seconds.
+ :type period_seconds: int
+ :param initial_delay_seconds: The delay before the first probe in seconds.
+ :type initial_delay_seconds: int
+ """
+
+ _attribute_map = {
+ 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+ 'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+ 'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+ 'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+ 'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ failure_threshold: Optional[int] = None,
+ success_threshold: Optional[int] = None,
+ timeout_seconds: Optional[int] = None,
+ period_seconds: Optional[int] = None,
+ initial_delay_seconds: Optional[int] = None,
+ **kwargs
+ ):
+ super(AksServiceCreateRequestLivenessProbeRequirements, self).__init__(failure_threshold=failure_threshold, success_threshold=success_threshold, timeout_seconds=timeout_seconds, period_seconds=period_seconds, initial_delay_seconds=initial_delay_seconds, **kwargs)
+
+
+class AksVariantResponse(ServiceResponseBase):
+ """The response for an AKS variant.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AksServiceResponse.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The service description.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :ivar state: The current state of the service. Possible values include: "Transitioning",
+ "Healthy", "Unhealthy", "Failed", "Unschedulable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+ :ivar error: The error details.
+ :vartype error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+ :param compute_type: Required. The compute environment type for the service. Constant filled
+ by server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param deployment_type: The deployment type for the service. Possible values include:
+ "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+ :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+ :param is_default: Is this the default variant.
+ :type is_default: bool
+ :param traffic_percentile: The amount of traffic the variant receives.
+ :type traffic_percentile: float
+ :param type: The type of the variant. Possible values include: "Control", "Treatment".
+ :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+ """
+
+ _validation = {
+ 'state': {'readonly': True},
+ 'error': {'readonly': True},
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+ 'type': {'key': 'type', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AKS': 'AksServiceResponse'}
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ kv_tags: Optional[Dict[str, str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ deployment_type: Optional[Union[str, "DeploymentType"]] = None,
+ is_default: Optional[bool] = None,
+ traffic_percentile: Optional[float] = None,
+ type: Optional[Union[str, "VariantType"]] = None,
+ **kwargs
+ ):
+ super(AksVariantResponse, self).__init__(description=description, kv_tags=kv_tags, properties=properties, deployment_type=deployment_type, **kwargs)
+ self.compute_type = 'Custom' # type: str
+ self.is_default = is_default
+ self.traffic_percentile = traffic_percentile
+ self.type = type
+
+
+class AksServiceResponse(AksVariantResponse):
+ """The response for an AKS service.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param description: The service description.
+ :type description: str
+ :param kv_tags: The service tag dictionary. Tags are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The service property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :ivar state: The current state of the service. Possible values include: "Transitioning",
+ "Healthy", "Unhealthy", "Failed", "Unschedulable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.WebServiceState
+ :ivar error: The error details.
+ :vartype error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+ :param compute_type: Required. The compute environment type for the service. Constant filled
+ by server. Possible values include: "ACI", "AKS".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeEnvironmentType
+ :param deployment_type: The deployment type for the service. Possible values include:
+ "GRPCRealtimeEndpoint", "HttpRealtimeEndpoint", "Batch".
+ :type deployment_type: str or ~azure_machine_learning_workspaces.models.DeploymentType
+ :param is_default: Is this the default variant.
+ :type is_default: bool
+ :param traffic_percentile: The amount of traffic the variant receives.
+ :type traffic_percentile: float
+ :param type: The type of the variant. Possible values include: "Control", "Treatment".
+ :type type: str or ~azure_machine_learning_workspaces.models.VariantType
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param container_resource_requirements: The container resource requirements.
+ :type container_resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ :param max_concurrent_requests_per_container: The maximum number of concurrent requests per
+ container.
+ :type max_concurrent_requests_per_container: int
+ :param max_queue_wait_ms: Maximum time a request will wait in the queue (in milliseconds).
+ After this time, the service will return 503 (Service Unavailable).
+ :type max_queue_wait_ms: int
+ :param compute_name: The name of the compute resource.
+ :type compute_name: str
+ :param namespace: The Kubernetes namespace of the deployment.
+ :type namespace: str
+ :param num_replicas: The number of replicas on the cluster.
+ :type num_replicas: int
+ :param data_collection: Details of the data collection options specified.
+ :type data_collection: ~azure_machine_learning_workspaces.models.ModelDataCollection
+ :param app_insights_enabled: Whether or not Application Insights is enabled.
+ :type app_insights_enabled: bool
+ :param auto_scaler: The auto scaler properties.
+ :type auto_scaler: ~azure_machine_learning_workspaces.models.AutoScaler
+ :ivar scoring_uri: The Uri for sending scoring requests.
+ :vartype scoring_uri: str
+ :ivar deployment_status: The deployment status.
+ :vartype deployment_status: ~azure_machine_learning_workspaces.models.AksReplicaStatus
+ :param scoring_timeout_ms: The scoring timeout in milliseconds.
+ :type scoring_timeout_ms: int
+ :param liveness_probe_requirements: The liveness probe requirements.
+ :type liveness_probe_requirements:
+ ~azure_machine_learning_workspaces.models.LivenessProbeRequirements
+ :param auth_enabled: Whether or not authentication is enabled.
+ :type auth_enabled: bool
+ :param aad_auth_enabled: Whether or not AAD authentication is enabled.
+ :type aad_auth_enabled: bool
+ :ivar swagger_uri: The Uri for sending swagger requests.
+ :vartype swagger_uri: str
+ :ivar model_config_map: Details on the models and configurations.
+ :vartype model_config_map: dict[str, object]
+ :param environment_image_request: The Environment, models and assets used for inferencing.
+ :type environment_image_request:
+ ~azure_machine_learning_workspaces.models.EnvironmentImageResponse
+ """
+
+ _validation = {
+ 'state': {'readonly': True},
+ 'error': {'readonly': True},
+ 'compute_type': {'required': True},
+ 'scoring_uri': {'readonly': True},
+ 'deployment_status': {'readonly': True},
+ 'swagger_uri': {'readonly': True},
+ 'model_config_map': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'description': {'key': 'description', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'deployment_type': {'key': 'deploymentType', 'type': 'str'},
+ 'is_default': {'key': 'isDefault', 'type': 'bool'},
+ 'traffic_percentile': {'key': 'trafficPercentile', 'type': 'float'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'},
+ 'max_concurrent_requests_per_container': {'key': 'maxConcurrentRequestsPerContainer', 'type': 'int'},
+ 'max_queue_wait_ms': {'key': 'maxQueueWaitMs', 'type': 'int'},
+ 'compute_name': {'key': 'computeName', 'type': 'str'},
+ 'namespace': {'key': 'namespace', 'type': 'str'},
+ 'num_replicas': {'key': 'numReplicas', 'type': 'int'},
+ 'data_collection': {'key': 'dataCollection', 'type': 'ModelDataCollection'},
+ 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'},
+ 'auto_scaler': {'key': 'autoScaler', 'type': 'AutoScaler'},
+ 'scoring_uri': {'key': 'scoringUri', 'type': 'str'},
+ 'deployment_status': {'key': 'deploymentStatus', 'type': 'AksReplicaStatus'},
+ 'scoring_timeout_ms': {'key': 'scoringTimeoutMs', 'type': 'int'},
+ 'liveness_probe_requirements': {'key': 'livenessProbeRequirements', 'type': 'LivenessProbeRequirements'},
+ 'auth_enabled': {'key': 'authEnabled', 'type': 'bool'},
+ 'aad_auth_enabled': {'key': 'aadAuthEnabled', 'type': 'bool'},
+ 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'},
+ 'model_config_map': {'key': 'modelConfigMap', 'type': '{object}'},
+ 'environment_image_request': {'key': 'environmentImageRequest', 'type': 'EnvironmentImageResponse'},
+ }
+
+ def __init__(
+ self,
+ *,
+ description: Optional[str] = None,
+ kv_tags: Optional[Dict[str, str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ deployment_type: Optional[Union[str, "DeploymentType"]] = None,
+ is_default: Optional[bool] = None,
+ traffic_percentile: Optional[float] = None,
+ type: Optional[Union[str, "VariantType"]] = None,
+ models: Optional[List["Model"]] = None,
+ container_resource_requirements: Optional["ContainerResourceRequirements"] = None,
+ max_concurrent_requests_per_container: Optional[int] = None,
+ max_queue_wait_ms: Optional[int] = None,
+ compute_name: Optional[str] = None,
+ namespace: Optional[str] = None,
+ num_replicas: Optional[int] = None,
+ data_collection: Optional["ModelDataCollection"] = None,
+ app_insights_enabled: Optional[bool] = None,
+ auto_scaler: Optional["AutoScaler"] = None,
+ scoring_timeout_ms: Optional[int] = None,
+ liveness_probe_requirements: Optional["LivenessProbeRequirements"] = None,
+ auth_enabled: Optional[bool] = None,
+ aad_auth_enabled: Optional[bool] = None,
+ environment_image_request: Optional["EnvironmentImageResponse"] = None,
+ **kwargs
+ ):
+ super(AksServiceResponse, self).__init__(description=description, kv_tags=kv_tags, properties=properties, deployment_type=deployment_type, is_default=is_default, traffic_percentile=traffic_percentile, type=type, **kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.models = models
+ self.container_resource_requirements = container_resource_requirements
+ self.max_concurrent_requests_per_container = max_concurrent_requests_per_container
+ self.max_queue_wait_ms = max_queue_wait_ms
+ self.compute_name = compute_name
+ self.namespace = namespace
+ self.num_replicas = num_replicas
+ self.data_collection = data_collection
+ self.app_insights_enabled = app_insights_enabled
+ self.auto_scaler = auto_scaler
+ self.scoring_uri = None
+ self.deployment_status = None
+ self.scoring_timeout_ms = scoring_timeout_ms
+ self.liveness_probe_requirements = liveness_probe_requirements
+ self.auth_enabled = auth_enabled
+ self.aad_auth_enabled = aad_auth_enabled
+ self.swagger_uri = None
+ self.model_config_map = None
+ self.environment_image_request = environment_image_request
+
+
+class AksServiceResponseAutoScaler(AutoScaler):
+ """The auto scaler properties.
+
+ :param autoscale_enabled: Option to enable/disable auto scaling.
+ :type autoscale_enabled: bool
+ :param min_replicas: The minimum number of replicas to scale down to.
+ :type min_replicas: int
+ :param max_replicas: The maximum number of replicas in the cluster.
+ :type max_replicas: int
+ :param target_utilization: The target utilization percentage to use for determining whether to
+ scale the cluster.
+ :type target_utilization: int
+ :param refresh_period_in_seconds: The number of seconds to wait between auto scale updates.
+ :type refresh_period_in_seconds: int
+ """
+
+ _attribute_map = {
+ 'autoscale_enabled': {'key': 'autoscaleEnabled', 'type': 'bool'},
+ 'min_replicas': {'key': 'minReplicas', 'type': 'int'},
+ 'max_replicas': {'key': 'maxReplicas', 'type': 'int'},
+ 'target_utilization': {'key': 'targetUtilization', 'type': 'int'},
+ 'refresh_period_in_seconds': {'key': 'refreshPeriodInSeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ autoscale_enabled: Optional[bool] = None,
+ min_replicas: Optional[int] = None,
+ max_replicas: Optional[int] = None,
+ target_utilization: Optional[int] = None,
+ refresh_period_in_seconds: Optional[int] = None,
+ **kwargs
+ ):
+ super(AksServiceResponseAutoScaler, self).__init__(autoscale_enabled=autoscale_enabled, min_replicas=min_replicas, max_replicas=max_replicas, target_utilization=target_utilization, refresh_period_in_seconds=refresh_period_in_seconds, **kwargs)
+
+
+class AksServiceResponseDataCollection(ModelDataCollection):
+ """Details of the data collection options specified.
+
+ :param event_hub_enabled: Option for enabling/disabling Event Hub.
+ :type event_hub_enabled: bool
+ :param storage_enabled: Option for enabling/disabling storage.
+ :type storage_enabled: bool
+ """
+
+ _attribute_map = {
+ 'event_hub_enabled': {'key': 'eventHubEnabled', 'type': 'bool'},
+ 'storage_enabled': {'key': 'storageEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ event_hub_enabled: Optional[bool] = None,
+ storage_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AksServiceResponseDataCollection, self).__init__(event_hub_enabled=event_hub_enabled, storage_enabled=storage_enabled, **kwargs)
+
+
+class AksServiceResponseDeploymentStatus(AksReplicaStatus):
+ """The deployment status.
+
+ :param desired_replicas: The desired number of replicas.
+ :type desired_replicas: int
+ :param updated_replicas: The number of updated replicas.
+ :type updated_replicas: int
+ :param available_replicas: The number of available replicas.
+ :type available_replicas: int
+ :param error: The error details.
+ :type error: ~azure_machine_learning_workspaces.models.MachineLearningServiceError
+ """
+
+ _attribute_map = {
+ 'desired_replicas': {'key': 'desiredReplicas', 'type': 'int'},
+ 'updated_replicas': {'key': 'updatedReplicas', 'type': 'int'},
+ 'available_replicas': {'key': 'availableReplicas', 'type': 'int'},
+ 'error': {'key': 'error', 'type': 'MachineLearningServiceError'},
+ }
+
+ def __init__(
+ self,
+ *,
+ desired_replicas: Optional[int] = None,
+ updated_replicas: Optional[int] = None,
+ available_replicas: Optional[int] = None,
+ error: Optional["MachineLearningServiceError"] = None,
+ **kwargs
+ ):
+ super(AksServiceResponseDeploymentStatus, self).__init__(desired_replicas=desired_replicas, updated_replicas=updated_replicas, available_replicas=available_replicas, error=error, **kwargs)
+
+
+class AksServiceResponseEnvironmentImageRequest(EnvironmentImageResponse):
+ """The Environment, models and assets used for inferencing.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the Azure ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinitionResponse
+ :param environment_reference: The unique identifying details of the Azure ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinitionResponse'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ driver_program: Optional[str] = None,
+ assets: Optional[List["ImageAsset"]] = None,
+ model_ids: Optional[List[str]] = None,
+ models: Optional[List["Model"]] = None,
+ environment: Optional["ModelEnvironmentDefinitionResponse"] = None,
+ environment_reference: Optional["EnvironmentReference"] = None,
+ **kwargs
+ ):
+ super(AksServiceResponseEnvironmentImageRequest, self).__init__(driver_program=driver_program, assets=assets, model_ids=model_ids, models=models, environment=environment, environment_reference=environment_reference, **kwargs)
+
+
+class AksServiceResponseLivenessProbeRequirements(LivenessProbeRequirements):
+ """The liveness probe requirements.
+
+ :param failure_threshold: The number of failures to allow before returning an unhealthy status.
+ :type failure_threshold: int
+ :param success_threshold: The number of successful probes before returning a healthy status.
+ :type success_threshold: int
+ :param timeout_seconds: The probe timeout in seconds.
+ :type timeout_seconds: int
+ :param period_seconds: The length of time between probes in seconds.
+ :type period_seconds: int
+ :param initial_delay_seconds: The delay before the first probe in seconds.
+ :type initial_delay_seconds: int
+ """
+
+ _attribute_map = {
+ 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'},
+ 'success_threshold': {'key': 'successThreshold', 'type': 'int'},
+ 'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
+ 'period_seconds': {'key': 'periodSeconds', 'type': 'int'},
+ 'initial_delay_seconds': {'key': 'initialDelaySeconds', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ failure_threshold: Optional[int] = None,
+ success_threshold: Optional[int] = None,
+ timeout_seconds: Optional[int] = None,
+ period_seconds: Optional[int] = None,
+ initial_delay_seconds: Optional[int] = None,
+ **kwargs
+ ):
+ super(AksServiceResponseLivenessProbeRequirements, self).__init__(failure_threshold=failure_threshold, success_threshold=success_threshold, timeout_seconds=timeout_seconds, period_seconds=period_seconds, initial_delay_seconds=initial_delay_seconds, **kwargs)
+
+
+class AmlCompute(Compute):
+ """An Azure Machine Learning compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties: AML Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["AmlComputeProperties"] = None,
+ **kwargs
+ ):
+ super(AmlCompute, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.properties = properties
+
+
+class AmlComputeNodeInformation(msrest.serialization.Model):
+ """Compute node information related to a AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar node_id: ID of the compute node.
+ :vartype node_id: str
+ :ivar private_ip_address: Private IP address of the compute node.
+ :vartype private_ip_address: str
+ :ivar public_ip_address: Public IP address of the compute node.
+ :vartype public_ip_address: str
+ :ivar port: SSH port number of the node.
+ :vartype port: int
+ :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable,
+ leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable",
+ "leaving", "preempted".
+ :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState
+ :ivar run_id: ID of the Experiment running on the node, if any; otherwise null.
+ :vartype run_id: str
+ """
+
+ _validation = {
+ 'node_id': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'port': {'readonly': True},
+ 'node_state': {'readonly': True},
+ 'run_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'node_id': {'key': 'nodeId', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'port': {'key': 'port', 'type': 'int'},
+ 'node_state': {'key': 'nodeState', 'type': 'str'},
+ 'run_id': {'key': 'runId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodeInformation, self).__init__(**kwargs)
+ self.node_id = None
+ self.private_ip_address = None
+ self.public_ip_address = None
+ self.port = None
+ self.node_state = None
+ self.run_id = None
+
+
+class ComputeNodesInformation(msrest.serialization.Model):
+ """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: AmlComputeNodesInformation.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeNodesInformation, self).__init__(**kwargs)
+ self.compute_type = None # type: Optional[str]
+ self.next_link = None
+
+
+class AmlComputeNodesInformation(ComputeNodesInformation):
+ """Compute node information related to a AmlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :ivar next_link: The continuation token.
+ :vartype next_link: str
+ :ivar nodes: The collection of returned AmlCompute nodes details.
+ :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation]
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'next_link': {'readonly': True},
+ 'nodes': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(AmlComputeNodesInformation, self).__init__(**kwargs)
+ self.compute_type = 'AmlCompute' # type: str
+ self.nodes = None
+
+
+class AmlComputeProperties(msrest.serialization.Model):
+ """AML Compute properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value:
+ "Linux".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.OsType
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated",
+ "LowPriority".
+ :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority
+ :param virtual_machine_image: Virtual Machine image for AML Compute - Windows only.
+ :type virtual_machine_image: ~azure_machine_learning_workspaces.models.VirtualMachineImage
+ :param isolated_network: Network is isolated or not.
+ :type isolated_network: bool
+ :param scale_settings: Scale settings for AML Compute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ :param user_account_credentials: Credentials for an administrator user account that will be
+ created on each compute node.
+ :type user_account_credentials:
+ ~azure_machine_learning_workspaces.models.UserAccountCredentials
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param remote_login_port_public_access: State of the public SSH port. Possible values are:
+ Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled -
+ Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified -
+ Indicates that the public ssh port is closed on all nodes of the cluster if a VNet is defined,
+ else it is open on all public nodes. It can be NotSpecified only at cluster creation time;
+ after creation it will be either Enabled or Disabled. Possible values include: "Enabled",
+ "Disabled", "NotSpecified". Default value: "NotSpecified".
+ :type remote_login_port_public_access: str or
+ ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess
+ :ivar allocation_state: Allocation state of the compute. Possible values are: steady -
+ Indicates that the compute is not resizing. There are no changes to the number of compute nodes
+ in the compute in progress. A compute enters this state when it is created and when no
+ operations are being performed on the compute to change the number of compute nodes. resizing -
+ Indicates that the compute is resizing; that is, compute nodes are being added to or removed
+ from the compute. Possible values include: "Steady", "Resizing".
+ :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState
+ :ivar allocation_state_transition_time: The time at which the compute entered its current
+ allocation state.
+ :vartype allocation_state_transition_time: ~datetime.datetime
+ :ivar errors: Collection of errors encountered by various compute nodes during node setup.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar current_node_count: The number of compute nodes currently assigned to the compute.
+ :vartype current_node_count: int
+ :ivar target_node_count: The target number of compute nodes for the compute. If the
+ allocationState is resizing, this property denotes the target node count for the ongoing resize
+ operation. If the allocationState is steady, this property denotes the target node count for
+ the previous resize operation.
+ :vartype target_node_count: int
+ :ivar node_state_counts: Counts of various node states on the compute.
+ :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts
+ :param enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
+ values are: true - Indicates that the compute nodes will have public IPs provisioned. false -
+ Indicates that the compute nodes will have a private endpoint and no public IPs.
+ :type enable_node_public_ip: bool
+ """
+
+ _validation = {
+ 'allocation_state': {'readonly': True},
+ 'allocation_state_transition_time': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'current_node_count': {'readonly': True},
+ 'target_node_count': {'readonly': True},
+ 'node_state_counts': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'vm_priority': {'key': 'vmPriority', 'type': 'str'},
+ 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'},
+ 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'},
+ 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'},
+ 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'},
+ 'allocation_state': {'key': 'allocationState', 'type': 'str'},
+ 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'},
+ 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+ 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'},
+ 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'},
+ 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'},
+ 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ os_type: Optional[Union[str, "OsType"]] = "Linux",
+ vm_size: Optional[str] = None,
+ vm_priority: Optional[Union[str, "VmPriority"]] = None,
+ virtual_machine_image: Optional["VirtualMachineImage"] = None,
+ isolated_network: Optional[bool] = None,
+ scale_settings: Optional["ScaleSettings"] = None,
+ user_account_credentials: Optional["UserAccountCredentials"] = None,
+ subnet: Optional["ResourceId"] = None,
+ remote_login_port_public_access: Optional[Union[str, "RemoteLoginPortPublicAccess"]] = "NotSpecified",
+ enable_node_public_ip: Optional[bool] = True,
+ **kwargs
+ ):
+ super(AmlComputeProperties, self).__init__(**kwargs)
+ self.os_type = os_type
+ self.vm_size = vm_size
+ self.vm_priority = vm_priority
+ self.virtual_machine_image = virtual_machine_image
+ self.isolated_network = isolated_network
+ self.scale_settings = scale_settings
+ self.user_account_credentials = user_account_credentials
+ self.subnet = subnet
+ self.remote_login_port_public_access = remote_login_port_public_access
+ self.allocation_state = None
+ self.allocation_state_transition_time = None
+ self.errors = None
+ self.current_node_count = None
+ self.target_node_count = None
+ self.node_state_counts = None
+ self.enable_node_public_ip = enable_node_public_ip
+
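+# Illustrative sketch (assumption, not generated code): a caller could describe an AmlCompute
+# cluster by nesting these models, roughly:
+#
+#     props = AmlComputeProperties(vm_size="STANDARD_DS3_V2", vm_priority="Dedicated",
+#                                  remote_login_port_public_access="NotSpecified")
+#     cluster = AmlCompute(description="training cluster", properties=props)
+#
+# The VM size string and the idea of passing the result to a compute create/update operation
+# are assumptions; only the keyword names come from the models above.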
+
+class AmlUserFeature(msrest.serialization.Model):
+ """Features enabled for a workspace.
+
+ :param id: Specifies the feature ID.
+ :type id: str
+ :param display_name: Specifies the feature name.
+ :type display_name: str
+ :param description: Describes the feature for user experience.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: Optional[str] = None,
+ display_name: Optional[str] = None,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(AmlUserFeature, self).__init__(**kwargs)
+ self.id = id
+ self.display_name = display_name
+ self.description = description
+
+
+class AssignedUser(msrest.serialization.Model):
+ """A user that can be assigned to a compute instance.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param object_id: Required. User’s AAD Object Id.
+ :type object_id: str
+ :param tenant_id: Required. User’s AAD Tenant Id.
+ :type tenant_id: str
+ """
+
+ _validation = {
+ 'object_id': {'required': True},
+ 'tenant_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'object_id': {'key': 'objectId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ object_id: str,
+ tenant_id: str,
+ **kwargs
+ ):
+ super(AssignedUser, self).__init__(**kwargs)
+ self.object_id = object_id
+ self.tenant_id = tenant_id
+
+
+class AuthKeys(msrest.serialization.Model):
+ """AuthKeys.
+
+ :param primary_key: The primary key.
+ :type primary_key: str
+ :param secondary_key: The secondary key.
+ :type secondary_key: str
+ """
+
+ _attribute_map = {
+ 'primary_key': {'key': 'primaryKey', 'type': 'str'},
+ 'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ primary_key: Optional[str] = None,
+ secondary_key: Optional[str] = None,
+ **kwargs
+ ):
+ super(AuthKeys, self).__init__(**kwargs)
+ self.primary_key = primary_key
+ self.secondary_key = secondary_key
+
+
+class AutoPauseProperties(msrest.serialization.Model):
+ """Auto pause properties.
+
+ :param delay_in_minutes:
+ :type delay_in_minutes: int
+ :param enabled:
+ :type enabled: bool
+ """
+
+ _attribute_map = {
+ 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ delay_in_minutes: Optional[int] = None,
+ enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(AutoPauseProperties, self).__init__(**kwargs)
+ self.delay_in_minutes = delay_in_minutes
+ self.enabled = enabled
+
+
+class AutoScaleProperties(msrest.serialization.Model):
+ """Auto scale properties.
+
+ :param min_node_count:
+ :type min_node_count: int
+ :param enabled:
+ :type enabled: bool
+ :param max_node_count:
+ :type max_node_count: int
+ """
+
+ _attribute_map = {
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'enabled': {'key': 'enabled', 'type': 'bool'},
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ min_node_count: Optional[int] = None,
+ enabled: Optional[bool] = None,
+ max_node_count: Optional[int] = None,
+ **kwargs
+ ):
+ super(AutoScaleProperties, self).__init__(**kwargs)
+ self.min_node_count = min_node_count
+ self.enabled = enabled
+ self.max_node_count = max_node_count
+
+
+class ClusterUpdateParameters(msrest.serialization.Model):
+ """AmlCompute update parameters.
+
+ :param scale_settings: Desired scale settings for the amlCompute.
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings
+ """
+
+ _attribute_map = {
+ 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'},
+ }
+
+ def __init__(
+ self,
+ *,
+ scale_settings: Optional["ScaleSettings"] = None,
+ **kwargs
+ ):
+ super(ClusterUpdateParameters, self).__init__(**kwargs)
+ self.scale_settings = scale_settings
+
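+# Note on serialization (hedged, based on the dotted key above): the 'properties.scaleSettings'
+# key tells msrest to flatten scale_settings under a nested "properties" object, so something
+# like ClusterUpdateParameters(scale_settings=settings).serialize() is expected to yield
+# {"properties": {"scaleSettings": {...}}} on the wire; the exact inner shape depends on the
+# ScaleSettings model defined elsewhere in this file.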
+
+class ComputeInstance(Compute):
+ """An Azure Machine Learning compute instance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ brought from outside if true, or provisioned by the machine learning service if false.
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties: Compute Instance properties.
+ :type properties: ~azure_machine_learning_workspaces.models.ComputeInstanceProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["ComputeInstanceProperties"] = None,
+ **kwargs
+ ):
+ super(ComputeInstance, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'ComputeInstance' # type: str
+ self.properties = properties
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+ :param endpoint_uri: Application's endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ display_name: Optional[str] = None,
+ endpoint_uri: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = display_name
+ self.endpoint_uri = endpoint_uri
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+ """Defines all connectivity endpoints and properties for an ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+ which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+ """Describes information on user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_name: Name of the user.
+ :vartype user_name: str
+ :ivar user_org_id: Uniquely identifies the user's Azure Active Directory organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceLastOperation(msrest.serialization.Model):
+ """The last operation on ComputeInstance.
+
+ :param operation_name: Name of the last operation. Possible values include: "Create", "Start",
+ "Stop", "Restart", "Reimage", "Delete".
+ :type operation_name: str or ~azure_machine_learning_workspaces.models.OperationName
+ :param operation_time: Time of the last operation.
+ :type operation_time: ~datetime.datetime
+ :param operation_status: Operation status. Possible values include: "InProgress", "Succeeded",
+ "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ReimageFailed", "DeleteFailed".
+ :type operation_status: str or ~azure_machine_learning_workspaces.models.OperationStatus
+ """
+
+ _attribute_map = {
+ 'operation_name': {'key': 'operationName', 'type': 'str'},
+ 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'},
+ 'operation_status': {'key': 'operationStatus', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ operation_name: Optional[Union[str, "OperationName"]] = None,
+ operation_time: Optional[datetime.datetime] = None,
+ operation_status: Optional[Union[str, "OperationStatus"]] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceLastOperation, self).__init__(**kwargs)
+ self.operation_name = operation_name
+ self.operation_time = operation_time
+ self.operation_status = operation_status
+
+
+class ComputeInstanceProperties(msrest.serialization.Model):
+ """Compute Instance properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param vm_size: Virtual Machine Size.
+ :type vm_size: str
+ :param subnet: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet: ~azure_machine_learning_workspaces.models.ResourceId
+ :param application_sharing_policy: Policy for sharing applications on this compute instance
+ among users of the parent workspace. If Personal, only the creator can access applications on this
+ compute instance. When Shared, any workspace user can access applications on this instance
+ depending on his/her assigned role. Possible values include: "Personal", "Shared". Default
+ value: "Shared".
+ :type application_sharing_policy: str or
+ ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy
+ :param ssh_settings: Specifies policy and settings for SSH access.
+ :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings
+ :ivar connectivity_endpoints: Describes all connectivity endpoints available for this
+ ComputeInstance.
+ :vartype connectivity_endpoints:
+ ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints
+ :ivar applications: Describes available applications and their endpoints on this
+ ComputeInstance.
+ :vartype applications:
+ list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication]
+ :ivar created_by: Describes information on user who created this ComputeInstance.
+ :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy
+ :ivar errors: Collection of errors encountered on this ComputeInstance.
+ :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar state: The current state of this ComputeInstance. Possible values include: "Creating",
+ "CreateFailed", "Deleting", "Running", "Restarting", "JobRunning", "SettingUp", "SetupFailed",
+ "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", "Unknown", "Unusable".
+ :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState
+ :param compute_instance_authorization_type: The Compute Instance Authorization type. Available
+ values are personal (default). Possible values include: "personal". Default value: "personal".
+ :type compute_instance_authorization_type: str or
+ ~azure_machine_learning_workspaces.models.ComputeInstanceAuthorizationType
+ :param personal_compute_instance_settings: Settings for a personal compute instance.
+ :type personal_compute_instance_settings:
+ ~azure_machine_learning_workspaces.models.PersonalComputeInstanceSettings
+ :param setup_scripts: Details of customized scripts to execute for setting up the cluster.
+ :type setup_scripts: ~azure_machine_learning_workspaces.models.SetupScripts
+ :ivar last_operation: The last operation on ComputeInstance.
+ :vartype last_operation: ~azure_machine_learning_workspaces.models.ComputeInstanceLastOperation
+ """
+
+ _validation = {
+ 'connectivity_endpoints': {'readonly': True},
+ 'applications': {'readonly': True},
+ 'created_by': {'readonly': True},
+ 'errors': {'readonly': True},
+ 'state': {'readonly': True},
+ 'last_operation': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'vm_size': {'key': 'vmSize', 'type': 'str'},
+ 'subnet': {'key': 'subnet', 'type': 'ResourceId'},
+ 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'},
+ 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'},
+ 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'},
+ 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'},
+ 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'},
+ 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'},
+ 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'},
+ 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'},
+ 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'},
+ }
+
+ def __init__(
+ self,
+ *,
+ vm_size: Optional[str] = None,
+ subnet: Optional["ResourceId"] = None,
+ application_sharing_policy: Optional[Union[str, "ApplicationSharingPolicy"]] = "Shared",
+ ssh_settings: Optional["ComputeInstanceSshSettings"] = None,
+ compute_instance_authorization_type: Optional[Union[str, "ComputeInstanceAuthorizationType"]] = "personal",
+ personal_compute_instance_settings: Optional["PersonalComputeInstanceSettings"] = None,
+ setup_scripts: Optional["SetupScripts"] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceProperties, self).__init__(**kwargs)
+ self.vm_size = vm_size
+ self.subnet = subnet
+ self.application_sharing_policy = application_sharing_policy
+ self.ssh_settings = ssh_settings
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.compute_instance_authorization_type = compute_instance_authorization_type
+ self.personal_compute_instance_settings = personal_compute_instance_settings
+ self.setup_scripts = setup_scripts
+ self.last_operation = None
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param ssh_public_access: State of the public SSH port. Possible values are: Disabled -
+ Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the
+ public ssh port is open and accessible according to the VNet/subnet policy if applicable.
+ Possible values include: "Enabled", "Disabled". Default value: "Disabled".
+ :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess
+ :ivar admin_user_name: Describes the admin user name.
+ :vartype admin_user_name: str
+ :ivar ssh_port: Describes the port for connecting through SSH.
+ :vartype ssh_port: int
+ :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t
+ rsa -b 2048" to generate your SSH key pairs.
+ :type admin_public_key: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'readonly': True},
+ 'ssh_port': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'},
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ ssh_public_access: Optional[Union[str, "SshPublicAccess"]] = "Disabled",
+ admin_public_key: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceSshSettings, self).__init__(**kwargs)
+ self.ssh_public_access = ssh_public_access
+ self.admin_user_name = None
+ self.ssh_port = None
+ self.admin_public_key = admin_public_key
+
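+# Illustrative sketch (assumption): SSH access for a compute instance is modelled separately
+# and then attached through ComputeInstanceProperties, e.g.:
+#
+#     ssh = ComputeInstanceSshSettings(ssh_public_access="Enabled",
+#                                      admin_public_key="ssh-rsa AAAA... user@host")
+#     ci_props = ComputeInstanceProperties(vm_size="STANDARD_DS3_V2", ssh_settings=ssh)
+#
+# The key string and VM size are placeholders; admin_user_name and ssh_port remain read-only
+# and are populated by the service on responses.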
+
+class Resource(msrest.serialization.Model):
+ """Azure Resource Manager resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ **kwargs
+ ):
+ super(Resource, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.identity = identity
+ self.location = location
+ self.type = None
+ self.tags = tags
+ self.sku = sku
+ self.system_data = None
+
+
+class ComputeResource(Resource):
+ """Machine Learning compute object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :param properties: Compute properties.
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ properties: Optional["Compute"] = None,
+ **kwargs
+ ):
+ super(ComputeResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+ self.properties = properties
+
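+# Illustrative sketch (not part of the generated model code): wrapping a compute
+# definition in the ARM resource envelope before sending a create/update request.
+# The DataFactory model used for `properties` is defined later in this module.
+#
+#     compute_resource = ComputeResource(
+#         location="eastus",
+#         tags={"team": "ml"},
+#         properties=DataFactory(description="Attached Data Factory compute"),
+#     )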
+
+class ContainerRegistry(msrest.serialization.Model):
+ """ContainerRegistry.
+
+ :param address:
+ :type address: str
+ :param username:
+ :type username: str
+ :param password:
+ :type password: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ address: Optional[str] = None,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ **kwargs
+ ):
+ super(ContainerRegistry, self).__init__(**kwargs)
+ self.address = address
+ self.username = username
+ self.password = password
+
+
+class ContainerRegistryResponse(msrest.serialization.Model):
+ """ContainerRegistryResponse.
+
+ :param address:
+ :type address: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ address: Optional[str] = None,
+ **kwargs
+ ):
+ super(ContainerRegistryResponse, self).__init__(**kwargs)
+ self.address = address
+
+
+class ContainerResourceRequirements(msrest.serialization.Model):
+ """The resource requirements for the container (cpu and memory).
+
+ :param cpu: The minimum number of CPU cores to be used by the container. More info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type cpu: float
+ :param cpu_limit: The maximum number of CPU cores allowed to be used by the container. More
+ info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type cpu_limit: float
+ :param memory_in_gb: The minimum amount of memory (in GB) to be used by the container. More
+ info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type memory_in_gb: float
+ :param memory_in_gb_limit: The maximum amount of memory (in GB) allowed to be used by the
+ container. More info:
+ https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/.
+ :type memory_in_gb_limit: float
+ :param gpu: The number of GPU cores in the container.
+ :type gpu: int
+ :param fpga: The number of FPGA PCIe devices exposed to the container. Must be a multiple of 2.
+ :type fpga: int
+ """
+
+ _attribute_map = {
+ 'cpu': {'key': 'cpu', 'type': 'float'},
+ 'cpu_limit': {'key': 'cpuLimit', 'type': 'float'},
+ 'memory_in_gb': {'key': 'memoryInGB', 'type': 'float'},
+ 'memory_in_gb_limit': {'key': 'memoryInGBLimit', 'type': 'float'},
+ 'gpu': {'key': 'gpu', 'type': 'int'},
+ 'fpga': {'key': 'fpga', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ cpu: Optional[float] = None,
+ cpu_limit: Optional[float] = None,
+ memory_in_gb: Optional[float] = None,
+ memory_in_gb_limit: Optional[float] = None,
+ gpu: Optional[int] = None,
+ fpga: Optional[int] = None,
+ **kwargs
+ ):
+ super(ContainerResourceRequirements, self).__init__(**kwargs)
+ self.cpu = cpu
+ self.cpu_limit = cpu_limit
+ self.memory_in_gb = memory_in_gb
+ self.memory_in_gb_limit = memory_in_gb_limit
+ self.gpu = gpu
+ self.fpga = fpga
+
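+# Illustrative sketch (not part of the generated model code): requesting 1 CPU core and
+# 2 GB of memory as the minimum for a scoring container, with limits of 2 cores and 4 GB.
+#
+#     requirements = ContainerResourceRequirements(
+#         cpu=1.0,
+#         cpu_limit=2.0,
+#         memory_in_gb=2.0,
+#         memory_in_gb_limit=4.0,
+#     )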
+
+class CosmosDbSettings(msrest.serialization.Model):
+ """CosmosDbSettings.
+
+ :param collections_throughput: The throughput of the collections in the Cosmos DB database.
+ :type collections_throughput: int
+ """
+
+ _attribute_map = {
+ 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ collections_throughput: Optional[int] = None,
+ **kwargs
+ ):
+ super(CosmosDbSettings, self).__init__(**kwargs)
+ self.collections_throughput = collections_throughput
+
+
+class EnvironmentImageRequest(msrest.serialization.Model):
+ """Request to create a Docker image based on Environment.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the AZURE ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinition
+ :param environment_reference: The unique identifying details of the AZURE ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinition'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ driver_program: Optional[str] = None,
+ assets: Optional[List["ImageAsset"]] = None,
+ model_ids: Optional[List[str]] = None,
+ models: Optional[List["Model"]] = None,
+ environment: Optional["ModelEnvironmentDefinition"] = None,
+ environment_reference: Optional["EnvironmentReference"] = None,
+ **kwargs
+ ):
+ super(EnvironmentImageRequest, self).__init__(**kwargs)
+ self.driver_program = driver_program
+ self.assets = assets
+ self.model_ids = model_ids
+ self.models = models
+ self.environment = environment
+ self.environment_reference = environment_reference
+
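+# Illustrative sketch (not part of the generated model code): requesting an image build
+# from registered model ids and a named environment. EnvironmentReference is defined
+# later in this module; the names and ids below are placeholders.
+#
+#     image_request = EnvironmentImageRequest(
+#         driver_program="score.py",
+#         model_ids=["my-model:1"],
+#         environment_reference=EnvironmentReference(name="my-environment", version="1"),
+#     )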
+
+class CreateServiceRequestEnvironmentImageRequest(EnvironmentImageRequest):
+ """The Environment, models and assets needed for inferencing.
+
+ :param driver_program: The name of the driver file.
+ :type driver_program: str
+ :param assets: The list of assets.
+ :type assets: list[~azure_machine_learning_workspaces.models.ImageAsset]
+ :param model_ids: The list of model Ids.
+ :type model_ids: list[str]
+ :param models: The list of models.
+ :type models: list[~azure_machine_learning_workspaces.models.Model]
+ :param environment: The details of the AZURE ML environment.
+ :type environment: ~azure_machine_learning_workspaces.models.ModelEnvironmentDefinition
+ :param environment_reference: The unique identifying details of the AZURE ML environment.
+ :type environment_reference: ~azure_machine_learning_workspaces.models.EnvironmentReference
+ """
+
+ _attribute_map = {
+ 'driver_program': {'key': 'driverProgram', 'type': 'str'},
+ 'assets': {'key': 'assets', 'type': '[ImageAsset]'},
+ 'model_ids': {'key': 'modelIds', 'type': '[str]'},
+ 'models': {'key': 'models', 'type': '[Model]'},
+ 'environment': {'key': 'environment', 'type': 'ModelEnvironmentDefinition'},
+ 'environment_reference': {'key': 'environmentReference', 'type': 'EnvironmentReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ driver_program: Optional[str] = None,
+ assets: Optional[List["ImageAsset"]] = None,
+ model_ids: Optional[List[str]] = None,
+ models: Optional[List["Model"]] = None,
+ environment: Optional["ModelEnvironmentDefinition"] = None,
+ environment_reference: Optional["EnvironmentReference"] = None,
+ **kwargs
+ ):
+ super(CreateServiceRequestEnvironmentImageRequest, self).__init__(driver_program=driver_program, assets=assets, model_ids=model_ids, models=models, environment=environment, environment_reference=environment_reference, **kwargs)
+
+
+class CreateServiceRequestKeys(AuthKeys):
+ """The authentication keys.
+
+ :param primary_key: The primary key.
+ :type primary_key: str
+ :param secondary_key: The secondary key.
+ :type secondary_key: str
+ """
+
+ _attribute_map = {
+ 'primary_key': {'key': 'primaryKey', 'type': 'str'},
+ 'secondary_key': {'key': 'secondaryKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ primary_key: Optional[str] = None,
+ secondary_key: Optional[str] = None,
+ **kwargs
+ ):
+ super(CreateServiceRequestKeys, self).__init__(primary_key=primary_key, secondary_key=secondary_key, **kwargs)
+
+
+class Databricks(Compute):
+ """A Databricks compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ attached from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DatabricksProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DatabricksProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["DatabricksProperties"] = None,
+ **kwargs
+ ):
+ super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.properties = properties
+
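+# Illustrative sketch (not part of the generated model code): describing an attached
+# Databricks workspace. DatabricksProperties is defined later in this module; the
+# resource id and token values are placeholders.
+#
+#     databricks_compute = Databricks(
+#         description="Attached Databricks workspace",
+#         resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Databricks/workspaces/<name>",
+#         properties=DatabricksProperties(databricks_access_token="<token>"),
+#     )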
+
+class DatabricksComputeSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on Databricks.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param databricks_access_token: Access token for the Databricks account.
+ :type databricks_access_token: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ databricks_access_token: Optional[str] = None,
+ **kwargs
+ ):
+ super(DatabricksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'Databricks' # type: str
+ self.databricks_access_token = databricks_access_token
+
+
+class DatabricksProperties(msrest.serialization.Model):
+ """DatabricksProperties.
+
+ :param databricks_access_token: Databricks access token.
+ :type databricks_access_token: str
+ :param workspace_url: Workspace Url.
+ :type workspace_url: str
+ """
+
+ _attribute_map = {
+ 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'},
+ 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ databricks_access_token: Optional[str] = None,
+ workspace_url: Optional[str] = None,
+ **kwargs
+ ):
+ super(DatabricksProperties, self).__init__(**kwargs)
+ self.databricks_access_token = databricks_access_token
+ self.workspace_url = workspace_url
+
+
+class DataFactory(Compute):
+ """A DataFactory compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ attached from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ **kwargs
+ ):
+ super(DataFactory, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'DataFactory' # type: str
+
+
+class DataLakeAnalytics(Compute):
+ """A DataLakeAnalytics compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ attached from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.DataLakeAnalyticsProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["DataLakeAnalyticsProperties"] = None,
+ **kwargs
+ ):
+ super(DataLakeAnalytics, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'DataLakeAnalytics' # type: str
+ self.properties = properties
+
+
+class DataLakeAnalyticsProperties(msrest.serialization.Model):
+ """DataLakeAnalyticsProperties.
+
+ :param data_lake_store_account_name: DataLake Store Account Name.
+ :type data_lake_store_account_name: str
+ """
+
+ _attribute_map = {
+ 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ data_lake_store_account_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(DataLakeAnalyticsProperties, self).__init__(**kwargs)
+ self.data_lake_store_account_name = data_lake_store_account_name
+
+
+class DatasetReference(msrest.serialization.Model):
+ """The dataset reference object.
+
+ :param name: The name of the dataset reference.
+ :type name: str
+ :param id: The id of the dataset reference.
+ :type id: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ id: Optional[str] = None,
+ **kwargs
+ ):
+ super(DatasetReference, self).__init__(**kwargs)
+ self.name = name
+ self.id = id
+
+
+class EncryptionProperty(msrest.serialization.Model):
+ """EncryptionProperty.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param status: Required. Indicates whether or not the encryption is enabled for the workspace.
+ Possible values include: "Enabled", "Disabled".
+ :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
+ :param identity: The identity that will be used to access the key vault for encryption at rest.
+ :type identity: ~azure_machine_learning_workspaces.models.IdentityForCmk
+ :param key_vault_properties: Required. Customer Key vault properties.
+ :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties
+ """
+
+ _validation = {
+ 'status': {'required': True},
+ 'key_vault_properties': {'required': True},
+ }
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'IdentityForCmk'},
+ 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Union[str, "EncryptionStatus"],
+ key_vault_properties: "KeyVaultProperties",
+ identity: Optional["IdentityForCmk"] = None,
+ **kwargs
+ ):
+ super(EncryptionProperty, self).__init__(**kwargs)
+ self.status = status
+ self.identity = identity
+ self.key_vault_properties = key_vault_properties
+
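+# Illustrative sketch (not part of the generated model code): enabling customer-managed
+# key encryption for a workspace. KeyVaultProperties is defined later in this module;
+# the ARM id and key uri below are placeholders.
+#
+#     encryption = EncryptionProperty(
+#         status="Enabled",
+#         key_vault_properties=KeyVaultProperties(
+#             key_vault_arm_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<kv>",
+#             key_identifier="https://<kv>.vault.azure.net/keys/<key>/<version>",
+#         ),
+#     )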
+
+class ModelEnvironmentDefinition(msrest.serialization.Model):
+ """ModelEnvironmentDefinition.
+
+ :param name: The name of the environment.
+ :type name: str
+ :param version: The environment version.
+ :type version: str
+ :param python: Settings for a Python environment.
+ :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+ :param environment_variables: Definition of environment variables to be defined in the
+ environment.
+ :type environment_variables: dict[str, str]
+ :param docker: The definition of a Docker container.
+ :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSection
+ :param spark: The configuration for a Spark environment.
+ :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+ :param r: Settings for an R environment.
+ :type r: ~azure_machine_learning_workspaces.models.RSection
+ :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+ adding an inferencing stack, do not set this value. Valid values: "latest".
+ :type inferencing_stack_version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ 'python': {'key': 'python', 'type': 'ModelPythonSection'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'docker': {'key': 'docker', 'type': 'ModelDockerSection'},
+ 'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+ 'r': {'key': 'r', 'type': 'RSection'},
+ 'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ version: Optional[str] = None,
+ python: Optional["ModelPythonSection"] = None,
+ environment_variables: Optional[Dict[str, str]] = None,
+ docker: Optional["ModelDockerSection"] = None,
+ spark: Optional["ModelSparkSection"] = None,
+ r: Optional["RSection"] = None,
+ inferencing_stack_version: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinition, self).__init__(**kwargs)
+ self.name = name
+ self.version = version
+ self.python = python
+ self.environment_variables = environment_variables
+ self.docker = docker
+ self.spark = spark
+ self.r = r
+ self.inferencing_stack_version = inferencing_stack_version
+
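+# Illustrative sketch (not part of the generated model code): a minimal environment
+# definition using only plain fields; the python/docker/spark/r sections are typed
+# models declared elsewhere in this package.
+#
+#     environment = ModelEnvironmentDefinition(
+#         name="my-inference-env",
+#         version="1",
+#         environment_variables={"EXAMPLE_VAR": "value"},
+#         inferencing_stack_version="latest",
+#     )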
+
+class EnvironmentImageRequestEnvironment(ModelEnvironmentDefinition):
+ """The details of the AZURE ML environment.
+
+ :param name: The name of the environment.
+ :type name: str
+ :param version: The environment version.
+ :type version: str
+ :param python: Settings for a Python environment.
+ :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+ :param environment_variables: Definition of environment variables to be defined in the
+ environment.
+ :type environment_variables: dict[str, str]
+ :param docker: The definition of a Docker container.
+ :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSection
+ :param spark: The configuration for a Spark environment.
+ :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+ :param r: Settings for an R environment.
+ :type r: ~azure_machine_learning_workspaces.models.RSection
+ :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+ adding an inferencing stack, do not set this value. Valid values: "latest".
+ :type inferencing_stack_version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ 'python': {'key': 'python', 'type': 'ModelPythonSection'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'docker': {'key': 'docker', 'type': 'ModelDockerSection'},
+ 'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+ 'r': {'key': 'r', 'type': 'RSection'},
+ 'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ version: Optional[str] = None,
+ python: Optional["ModelPythonSection"] = None,
+ environment_variables: Optional[Dict[str, str]] = None,
+ docker: Optional["ModelDockerSection"] = None,
+ spark: Optional["ModelSparkSection"] = None,
+ r: Optional["RSection"] = None,
+ inferencing_stack_version: Optional[str] = None,
+ **kwargs
+ ):
+ super(EnvironmentImageRequestEnvironment, self).__init__(name=name, version=version, python=python, environment_variables=environment_variables, docker=docker, spark=spark, r=r, inferencing_stack_version=inferencing_stack_version, **kwargs)
+
+
+class EnvironmentReference(msrest.serialization.Model):
+ """EnvironmentReference.
+
+ :param name: Name of the environment.
+ :type name: str
+ :param version: Version of the environment.
+ :type version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ version: Optional[str] = None,
+ **kwargs
+ ):
+ super(EnvironmentReference, self).__init__(**kwargs)
+ self.name = name
+ self.version = version
+
+
+class EnvironmentImageRequestEnvironmentReference(EnvironmentReference):
+ """The unique identifying details of the AZURE ML environment.
+
+ :param name: Name of the environment.
+ :type name: str
+ :param version: Version of the environment.
+ :type version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ version: Optional[str] = None,
+ **kwargs
+ ):
+ super(EnvironmentImageRequestEnvironmentReference, self).__init__(name=name, version=version, **kwargs)
+
+
+class ModelEnvironmentDefinitionResponse(msrest.serialization.Model):
+ """ModelEnvironmentDefinitionResponse.
+
+ :param name: The name of the environment.
+ :type name: str
+ :param version: The environment version.
+ :type version: str
+ :param python: Settings for a Python environment.
+ :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+ :param environment_variables: Definition of environment variables to be defined in the
+ environment.
+ :type environment_variables: dict[str, str]
+ :param docker: The definition of a Docker container.
+ :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSectionResponse
+ :param spark: The configuration for a Spark environment.
+ :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+ :param r: Settings for an R environment.
+ :type r: ~azure_machine_learning_workspaces.models.RSectionResponse
+ :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+ adding an inferencing stack, do not set this value. Valid values: "latest".
+ :type inferencing_stack_version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ 'python': {'key': 'python', 'type': 'ModelPythonSection'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'docker': {'key': 'docker', 'type': 'ModelDockerSectionResponse'},
+ 'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+ 'r': {'key': 'r', 'type': 'RSectionResponse'},
+ 'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ version: Optional[str] = None,
+ python: Optional["ModelPythonSection"] = None,
+ environment_variables: Optional[Dict[str, str]] = None,
+ docker: Optional["ModelDockerSectionResponse"] = None,
+ spark: Optional["ModelSparkSection"] = None,
+ r: Optional["RSectionResponse"] = None,
+ inferencing_stack_version: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponse, self).__init__(**kwargs)
+ self.name = name
+ self.version = version
+ self.python = python
+ self.environment_variables = environment_variables
+ self.docker = docker
+ self.spark = spark
+ self.r = r
+ self.inferencing_stack_version = inferencing_stack_version
+
+
+class EnvironmentImageResponseEnvironment(ModelEnvironmentDefinitionResponse):
+ """The details of the AZURE ML environment.
+
+ :param name: The name of the environment.
+ :type name: str
+ :param version: The environment version.
+ :type version: str
+ :param python: Settings for a Python environment.
+ :type python: ~azure_machine_learning_workspaces.models.ModelPythonSection
+ :param environment_variables: Definition of environment variables to be defined in the
+ environment.
+ :type environment_variables: dict[str, str]
+ :param docker: The definition of a Docker container.
+ :type docker: ~azure_machine_learning_workspaces.models.ModelDockerSectionResponse
+ :param spark: The configuration for a Spark environment.
+ :type spark: ~azure_machine_learning_workspaces.models.ModelSparkSection
+ :param r: Settings for an R environment.
+ :type r: ~azure_machine_learning_workspaces.models.RSectionResponse
+ :param inferencing_stack_version: The inferencing stack version added to the image. To avoid
+ adding an inferencing stack, do not set this value. Valid values: "latest".
+ :type inferencing_stack_version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ 'python': {'key': 'python', 'type': 'ModelPythonSection'},
+ 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
+ 'docker': {'key': 'docker', 'type': 'ModelDockerSectionResponse'},
+ 'spark': {'key': 'spark', 'type': 'ModelSparkSection'},
+ 'r': {'key': 'r', 'type': 'RSectionResponse'},
+ 'inferencing_stack_version': {'key': 'inferencingStackVersion', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ version: Optional[str] = None,
+ python: Optional["ModelPythonSection"] = None,
+ environment_variables: Optional[Dict[str, str]] = None,
+ docker: Optional["ModelDockerSectionResponse"] = None,
+ spark: Optional["ModelSparkSection"] = None,
+ r: Optional["RSectionResponse"] = None,
+ inferencing_stack_version: Optional[str] = None,
+ **kwargs
+ ):
+ super(EnvironmentImageResponseEnvironment, self).__init__(name=name, version=version, python=python, environment_variables=environment_variables, docker=docker, spark=spark, r=r, inferencing_stack_version=inferencing_stack_version, **kwargs)
+
+
+class EnvironmentImageResponseEnvironmentReference(EnvironmentReference):
+ """The unique identifying details of the AZURE ML environment.
+
+ :param name: Name of the environment.
+ :type name: str
+ :param version: Version of the environment.
+ :type version: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ version: Optional[str] = None,
+ **kwargs
+ ):
+ super(EnvironmentImageResponseEnvironmentReference, self).__init__(name=name, version=version, **kwargs)
+
+
+class ErrorDetail(msrest.serialization.Model):
+ """Error detail information.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param code: Required. Error code.
+ :type code: str
+ :param message: Required. Error message.
+ :type message: str
+ """
+
+ _validation = {
+ 'code': {'required': True},
+ 'message': {'required': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ code: str,
+ message: str,
+ **kwargs
+ ):
+ super(ErrorDetail, self).__init__(**kwargs)
+ self.code = code
+ self.message = message
+
+
+class ErrorResponse(msrest.serialization.Model):
+ """Error response information.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar code: Error code.
+ :vartype code: str
+ :ivar message: Error message.
+ :vartype message: str
+ :ivar target: The target of the particular error.
+ :vartype target: str
+ :ivar details: An array of error detail objects.
+ :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail]
+ """
+
+ _validation = {
+ 'code': {'readonly': True},
+ 'message': {'readonly': True},
+ 'target': {'readonly': True},
+ 'details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'target': {'key': 'target', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[ErrorDetail]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorResponse, self).__init__(**kwargs)
+ self.code = None
+ self.message = None
+ self.target = None
+ self.details = None
+
+
+class EstimatedVmPrice(msrest.serialization.Model):
+ """The estimated price info for using a VM of a particular OS type, tier, etc.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param retail_price: Required. The price charged for using the VM.
+ :type retail_price: float
+ :param os_type: Required. Operating system type used by the VM. Possible values include:
+ "Linux", "Windows".
+ :type os_type: str or ~azure_machine_learning_workspaces.models.VmPriceOsType
+ :param vm_tier: Required. The type of the VM. Possible values include: "Standard",
+ "LowPriority", "Spot".
+ :type vm_tier: str or ~azure_machine_learning_workspaces.models.VmTier
+ """
+
+ _validation = {
+ 'retail_price': {'required': True},
+ 'os_type': {'required': True},
+ 'vm_tier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'retail_price': {'key': 'retailPrice', 'type': 'float'},
+ 'os_type': {'key': 'osType', 'type': 'str'},
+ 'vm_tier': {'key': 'vmTier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ retail_price: float,
+ os_type: Union[str, "VmPriceOsType"],
+ vm_tier: Union[str, "VmTier"],
+ **kwargs
+ ):
+ super(EstimatedVmPrice, self).__init__(**kwargs)
+ self.retail_price = retail_price
+ self.os_type = os_type
+ self.vm_tier = vm_tier
+
+
+class EstimatedVmPrices(msrest.serialization.Model):
+ """The estimated price info for using a VM.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param billing_currency: Required. Three-letter code specifying the currency of the VM price.
+ Example: USD. Possible values include: "USD".
+ :type billing_currency: str or ~azure_machine_learning_workspaces.models.BillingCurrency
+ :param unit_of_measure: Required. The unit of time measurement for the specified VM price.
+ Example: OneHour. Possible values include: "OneHour".
+ :type unit_of_measure: str or ~azure_machine_learning_workspaces.models.UnitOfMeasure
+ :param values: Required. The list of estimated prices for using a VM of a particular OS type,
+ tier, etc.
+ :type values: list[~azure_machine_learning_workspaces.models.EstimatedVmPrice]
+ """
+
+ _validation = {
+ 'billing_currency': {'required': True},
+ 'unit_of_measure': {'required': True},
+ 'values': {'required': True},
+ }
+
+ _attribute_map = {
+ 'billing_currency': {'key': 'billingCurrency', 'type': 'str'},
+ 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[EstimatedVmPrice]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ billing_currency: Union[str, "BillingCurrency"],
+ unit_of_measure: Union[str, "UnitOfMeasure"],
+ values: List["EstimatedVmPrice"],
+ **kwargs
+ ):
+ super(EstimatedVmPrices, self).__init__(**kwargs)
+ self.billing_currency = billing_currency
+ self.unit_of_measure = unit_of_measure
+ self.values = values
+
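+# Illustrative sketch (not part of the generated model code): the shape of a price
+# estimate as returned by the service. These models are normally deserialized from
+# responses rather than built by hand; the numbers below are placeholders.
+#
+#     prices = EstimatedVmPrices(
+#         billing_currency="USD",
+#         unit_of_measure="OneHour",
+#         values=[EstimatedVmPrice(retail_price=0.09, os_type="Linux", vm_tier="Standard")],
+#     )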
+
+class HdInsight(Compute):
+ """An HDInsight compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+ attached from outside (true), or provisioned by the machine learning service (false).
+ :vartype is_attached_compute: bool
+ :param disable_local_auth: Opt out of local authentication and ensure customers can use only
+ MSI and AAD for authentication.
+ :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.HdInsightProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'HdInsightProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["HdInsightProperties"] = None,
+ **kwargs
+ ):
+ super(HdInsight, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'HDInsight' # type: str
+ self.properties = properties
+
+
+class HdInsightProperties(msrest.serialization.Model):
+ """HdInsightProperties.
+
+ :param ssh_port: Port open for ssh connections on the master node of the cluster.
+ :type ssh_port: int
+ :param address: Public IP address of the master node of the cluster.
+ :type address: str
+ :param administrator_account: Admin credentials for master node of the cluster.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _attribute_map = {
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ *,
+ ssh_port: Optional[int] = None,
+ address: Optional[str] = None,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ **kwargs
+ ):
+ super(HdInsightProperties, self).__init__(**kwargs)
+ self.ssh_port = ssh_port
+ self.address = address
+ self.administrator_account = administrator_account
+
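+# Illustrative sketch (not part of the generated model code): connection details for an
+# attached HDInsight cluster; administrator_account (VirtualMachineSshCredentials) is
+# declared elsewhere in this package, so only the plain fields are shown here.
+#
+#     hdinsight_props = HdInsightProperties(ssh_port=22, address="10.0.0.4")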
+
+class Identity(msrest.serialization.Model):
+ """Identity for the resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of resource identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of resource.
+ :vartype tenant_id: str
+ :param type: The identity type. Possible values include: "SystemAssigned",
+ "SystemAssigned,UserAssigned", "UserAssigned", "None".
+ :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
+ :param user_assigned_identities: The user assigned identities associated with the resource.
+ :type user_assigned_identities: dict[str,
+ ~azure_machine_learning_workspaces.models.UserAssignedIdentity]
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'},
+ }
+
+ def __init__(
+ self,
+ *,
+ type: Optional[Union[str, "ResourceIdentityType"]] = None,
+ user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None,
+ **kwargs
+ ):
+ super(Identity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.type = type
+ self.user_assigned_identities = user_assigned_identities
+
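+# Illustrative sketch (not part of the generated model code): a system-assigned managed
+# identity. principal_id and tenant_id are read-only and populated by the service.
+#
+#     identity = Identity(type="SystemAssigned")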
+
+class IdentityForCmk(msrest.serialization.Model):
+ """Identity that will be used to access key vault for encryption at rest.
+
+ :param user_assigned_identity: The ArmId of the user assigned identity that will be used to
+ access the customer managed key vault.
+ :type user_assigned_identity: str
+ """
+
+ _attribute_map = {
+ 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ user_assigned_identity: Optional[str] = None,
+ **kwargs
+ ):
+ super(IdentityForCmk, self).__init__(**kwargs)
+ self.user_assigned_identity = user_assigned_identity
+
+
+class ImageAsset(msrest.serialization.Model):
+ """An Image asset.
+
+ :param id: The Asset Id.
+ :type id: str
+ :param mime_type: The mime type.
+ :type mime_type: str
+ :param url: The Url of the Asset.
+ :type url: str
+ :param unpack: Whether the Asset is unpacked.
+ :type unpack: bool
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'mime_type': {'key': 'mimeType', 'type': 'str'},
+ 'url': {'key': 'url', 'type': 'str'},
+ 'unpack': {'key': 'unpack', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: Optional[str] = None,
+ mime_type: Optional[str] = None,
+ url: Optional[str] = None,
+ unpack: Optional[bool] = None,
+ **kwargs
+ ):
+ super(ImageAsset, self).__init__(**kwargs)
+ self.id = id
+ self.mime_type = mime_type
+ self.url = url
+ self.unpack = unpack
+
+
+class KeyVaultProperties(msrest.serialization.Model):
+ """KeyVaultProperties.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned
+ encryption key is present.
+ :type key_vault_arm_id: str
+ :param key_identifier: Required. Key vault uri to access the encryption key.
+ :type key_identifier: str
+ :param identity_client_id: For future use - The client id of the identity which will be used to
+ access key vault.
+ :type identity_client_id: str
+ """
+
+ _validation = {
+ 'key_vault_arm_id': {'required': True},
+ 'key_identifier': {'required': True},
+ }
+
+ _attribute_map = {
+ 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'},
+ 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'},
+ 'identity_client_id': {'key': 'identityClientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ key_vault_arm_id: str,
+ key_identifier: str,
+ identity_client_id: Optional[str] = None,
+ **kwargs
+ ):
+ super(KeyVaultProperties, self).__init__(**kwargs)
+ self.key_vault_arm_id = key_vault_arm_id
+ self.key_identifier = key_identifier
+ self.identity_client_id = identity_client_id
+
+
+class ListAmlUserFeatureResult(msrest.serialization.Model):
+ """The List Aml user feature operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML user facing features.
+ :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature]
+ :ivar next_link: The URI to fetch the next page of AML user features information. Call
+ ListNext() with this to fetch the next page of AML user features information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[AmlUserFeature]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListAmlUserFeatureResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListNotebookKeysResult(msrest.serialization.Model):
+ """ListNotebookKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar primary_access_key:
+ :vartype primary_access_key: str
+ :ivar secondary_access_key:
+ :vartype secondary_access_key: str
+ """
+
+ _validation = {
+ 'primary_access_key': {'readonly': True},
+ 'secondary_access_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'},
+ 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListNotebookKeysResult, self).__init__(**kwargs)
+ self.primary_access_key = None
+ self.secondary_access_key = None
+
+
+class ListStorageAccountKeysResult(msrest.serialization.Model):
+ """ListStorageAccountKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListStorageAccountKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+
+
+class ListUsagesResult(msrest.serialization.Model):
+ """The List Usages operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of AML resource usages.
+ :vartype value: list[~azure_machine_learning_workspaces.models.Usage]
+ :ivar next_link: The URI to fetch the next page of AML resource usage information. Call
+ ListNext() with this to fetch the next page of AML resource usage information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Usage]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListUsagesResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class ListWorkspaceKeysResult(msrest.serialization.Model):
+ """ListWorkspaceKeysResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_storage_key:
+ :vartype user_storage_key: str
+ :ivar user_storage_resource_id:
+ :vartype user_storage_resource_id: str
+ :ivar app_insights_instrumentation_key:
+ :vartype app_insights_instrumentation_key: str
+ :ivar container_registry_credentials:
+ :vartype container_registry_credentials:
+ ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult
+ :ivar notebook_access_keys:
+ :vartype notebook_access_keys: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ """
+
+ _validation = {
+ 'user_storage_key': {'readonly': True},
+ 'user_storage_resource_id': {'readonly': True},
+ 'app_insights_instrumentation_key': {'readonly': True},
+ 'container_registry_credentials': {'readonly': True},
+ 'notebook_access_keys': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'},
+ 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'},
+ 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'},
+ 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'},
+ 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceKeysResult, self).__init__(**kwargs)
+ self.user_storage_key = None
+ self.user_storage_resource_id = None
+ self.app_insights_instrumentation_key = None
+ self.container_registry_credentials = None
+ self.notebook_access_keys = None
+
+
+class ListWorkspaceQuotas(msrest.serialization.Model):
+ """The List WorkspaceQuotasByVMFamily operation response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The list of Workspace Quotas by VM Family.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota]
+ :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family.
+ Call ListNext() with this to fetch the next page of Workspace Quota information.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ResourceQuota]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ListWorkspaceQuotas, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class Model(msrest.serialization.Model):
+ """An Azure Machine Learning Model.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: The Model Id.
+ :type id: str
+ :param name: Required. The Model name.
+ :type name: str
+ :param framework: The Model framework.
+ :type framework: str
+ :param framework_version: The Model framework version.
+ :type framework_version: str
+ :param version: The Model version assigned by Model Management Service.
+ :type version: long
+ :param datasets: The list of datasets associated with the model.
+ :type datasets: list[~azure_machine_learning_workspaces.models.DatasetReference]
+ :param url: Required. The URL of the Model. Usually a SAS URL.
+ :type url: str
+ :param mime_type: Required. The MIME type of the Model content. For more details about MIME
+ types, see https://www.iana.org/assignments/media-types/media-types.xhtml.
+ :type mime_type: str
+ :param description: The Model description text.
+ :type description: str
+ :param created_time: The Model creation time (UTC).
+ :type created_time: ~datetime.datetime
+ :param modified_time: The Model last modified time (UTC).
+ :type modified_time: ~datetime.datetime
+ :param unpack: Indicates whether we need to unpack the Model during docker Image creation.
+ :type unpack: bool
+ :param parent_model_id: The Parent Model Id.
+ :type parent_model_id: str
+ :param run_id: The RunId that created this model.
+ :type run_id: str
+ :param experiment_name: The name of the experiment where this model was created.
+ :type experiment_name: str
+ :param kv_tags: The Model tag dictionary. Items are mutable.
+ :type kv_tags: dict[str, str]
+ :param properties: The Model property dictionary. Properties are immutable.
+ :type properties: dict[str, str]
+ :param derived_model_ids: Models derived from this model.
+ :type derived_model_ids: list[str]
+ :param sample_input_data: Sample Input Data for the Model. A reference to a dataset in the
+ workspace in the format aml://dataset/{datasetId}.
+ :type sample_input_data: str
+ :param sample_output_data: Sample Output Data for the Model. A reference to a dataset in the
+ workspace in the format aml://dataset/{datasetId}.
+ :type sample_output_data: str
+ :param resource_requirements: Resource requirements for the model.
+ :type resource_requirements:
+ ~azure_machine_learning_workspaces.models.ContainerResourceRequirements
+ """
+
+ _validation = {
+ 'name': {'required': True},
+ 'url': {'required': True},
+ 'mime_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'framework': {'key': 'framework', 'type': 'str'},
+ 'framework_version': {'key': 'frameworkVersion', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'long'},
+ 'datasets': {'key': 'datasets', 'type': '[DatasetReference]'},
+ 'url': {'key': 'url', 'type': 'str'},
+ 'mime_type': {'key': 'mimeType', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
+ 'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
+ 'unpack': {'key': 'unpack', 'type': 'bool'},
+ 'parent_model_id': {'key': 'parentModelId', 'type': 'str'},
+ 'run_id': {'key': 'runId', 'type': 'str'},
+ 'experiment_name': {'key': 'experimentName', 'type': 'str'},
+ 'kv_tags': {'key': 'kvTags', 'type': '{str}'},
+ 'properties': {'key': 'properties', 'type': '{str}'},
+ 'derived_model_ids': {'key': 'derivedModelIds', 'type': '[str]'},
+ 'sample_input_data': {'key': 'sampleInputData', 'type': 'str'},
+ 'sample_output_data': {'key': 'sampleOutputData', 'type': 'str'},
+ 'resource_requirements': {'key': 'resourceRequirements', 'type': 'ContainerResourceRequirements'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: str,
+ url: str,
+ mime_type: str,
+ id: Optional[str] = None,
+ framework: Optional[str] = None,
+ framework_version: Optional[str] = None,
+ version: Optional[int] = None,
+ datasets: Optional[List["DatasetReference"]] = None,
+ description: Optional[str] = None,
+ created_time: Optional[datetime.datetime] = None,
+ modified_time: Optional[datetime.datetime] = None,
+ unpack: Optional[bool] = None,
+ parent_model_id: Optional[str] = None,
+ run_id: Optional[str] = None,
+ experiment_name: Optional[str] = None,
+ kv_tags: Optional[Dict[str, str]] = None,
+ properties: Optional[Dict[str, str]] = None,
+ derived_model_ids: Optional[List[str]] = None,
+ sample_input_data: Optional[str] = None,
+ sample_output_data: Optional[str] = None,
+ resource_requirements: Optional["ContainerResourceRequirements"] = None,
+ **kwargs
+ ):
+ super(Model, self).__init__(**kwargs)
+ self.id = id
+ self.name = name
+ self.framework = framework
+ self.framework_version = framework_version
+ self.version = version
+ self.datasets = datasets
+ self.url = url
+ self.mime_type = mime_type
+ self.description = description
+ self.created_time = created_time
+ self.modified_time = modified_time
+ self.unpack = unpack
+ self.parent_model_id = parent_model_id
+ self.run_id = run_id
+ self.experiment_name = experiment_name
+ self.kv_tags = kv_tags
+ self.properties = properties
+ self.derived_model_ids = derived_model_ids
+ self.sample_input_data = sample_input_data
+ self.sample_output_data = sample_output_data
+ self.resource_requirements = resource_requirements
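+
+# Usage sketch (illustrative only): the field values below are hypothetical
+# placeholders chosen to satisfy the required name/url/mime_type parameters of
+# ``Model``; they are not values taken from this repository.
+#
+#   model = Model(
+#       name="sklearn-regression",
+#       url="https://mystorage.blob.core.windows.net/models/model.pkl?sig=<sas>",
+#       mime_type="application/octet-stream",
+#       description="Example model registration payload",
+#       kv_tags={"stage": "dev"},
+#   )
+#   # On serialization, keys follow ``_attribute_map`` (e.g. ``mimeType``, ``kvTags``).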
+
+
+class ModelDockerSection(msrest.serialization.Model):
+ """ModelDockerSection.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistry
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
+ }
+
+ def __init__(
+ self,
+ *,
+ base_image: Optional[str] = None,
+ base_dockerfile: Optional[str] = None,
+ base_image_registry: Optional["ContainerRegistry"] = None,
+ **kwargs
+ ):
+ super(ModelDockerSection, self).__init__(**kwargs)
+ self.base_image = base_image
+ self.base_dockerfile = base_dockerfile
+ self.base_image_registry = base_image_registry
+
+
+class ModelDockerSectionBaseImageRegistry(ContainerRegistry):
+ """Image registry that contains the base image.
+
+ :param address:
+ :type address: str
+ :param username:
+ :type username: str
+ :param password:
+ :type password: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ address: Optional[str] = None,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelDockerSectionBaseImageRegistry, self).__init__(address=address, username=username, password=password, **kwargs)
+
+
+class ModelDockerSectionResponse(msrest.serialization.Model):
+ """ModelDockerSectionResponse.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistryResponse
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistryResponse'},
+ }
+
+ def __init__(
+ self,
+ *,
+ base_image: Optional[str] = None,
+ base_dockerfile: Optional[str] = None,
+ base_image_registry: Optional["ContainerRegistryResponse"] = None,
+ **kwargs
+ ):
+ super(ModelDockerSectionResponse, self).__init__(**kwargs)
+ self.base_image = base_image
+ self.base_dockerfile = base_dockerfile
+ self.base_image_registry = base_image_registry
+
+
+class ModelDockerSectionResponseBaseImageRegistry(ContainerRegistryResponse):
+ """Image registry that contains the base image.
+
+ :param address:
+ :type address: str
+ """
+
+ _attribute_map = {
+ 'address': {'key': 'address', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ address: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelDockerSectionResponseBaseImageRegistry, self).__init__(address=address, **kwargs)
+
+
+class ModelEnvironmentDefinitionDocker(ModelDockerSection):
+ """The definition of a Docker container.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistry
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistry'},
+ }
+
+ def __init__(
+ self,
+ *,
+ base_image: Optional[str] = None,
+ base_dockerfile: Optional[str] = None,
+ base_image_registry: Optional["ContainerRegistry"] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionDocker, self).__init__(base_image=base_image, base_dockerfile=base_dockerfile, base_image_registry=base_image_registry, **kwargs)
+
+
+class ModelPythonSection(msrest.serialization.Model):
+ """ModelPythonSection.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ interpreter_path: Optional[str] = None,
+ user_managed_dependencies: Optional[bool] = None,
+ conda_dependencies: Optional[object] = None,
+ base_conda_environment: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelPythonSection, self).__init__(**kwargs)
+ self.interpreter_path = interpreter_path
+ self.user_managed_dependencies = user_managed_dependencies
+ self.conda_dependencies = conda_dependencies
+ self.base_conda_environment = base_conda_environment
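+
+# Usage sketch (illustrative only): a ModelPythonSection where AzureML manages
+# the environment from a Conda specification. The Conda spec shown is a
+# hypothetical example, not one taken from this repository.
+#
+#   python_section = ModelPythonSection(
+#       interpreter_path="python",
+#       user_managed_dependencies=False,
+#       conda_dependencies={
+#           "name": "project_environment",
+#           "dependencies": ["python=3.8", {"pip": ["azureml-defaults"]}],
+#       },
+#   )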
+
+
+class ModelEnvironmentDefinitionPython(ModelPythonSection):
+ """Settings for a Python environment.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ interpreter_path: Optional[str] = None,
+ user_managed_dependencies: Optional[bool] = None,
+ conda_dependencies: Optional[object] = None,
+ base_conda_environment: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionPython, self).__init__(interpreter_path=interpreter_path, user_managed_dependencies=user_managed_dependencies, conda_dependencies=conda_dependencies, base_conda_environment=base_conda_environment, **kwargs)
+
+
+class RSection(msrest.serialization.Model):
+ """RSection.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+    :param user_managed: Indicates whether the environment is managed by the user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackage]
+    :param custom_url_packages: The packages from custom URLs.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ r_version: Optional[str] = None,
+ user_managed: Optional[bool] = None,
+ rscript_path: Optional[str] = None,
+ snapshot_date: Optional[str] = None,
+ cran_packages: Optional[List["RCranPackage"]] = None,
+ git_hub_packages: Optional[List["RGitHubPackage"]] = None,
+ custom_url_packages: Optional[List[str]] = None,
+ bio_conductor_packages: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(RSection, self).__init__(**kwargs)
+ self.r_version = r_version
+ self.user_managed = user_managed
+ self.rscript_path = rscript_path
+ self.snapshot_date = snapshot_date
+ self.cran_packages = cran_packages
+ self.git_hub_packages = git_hub_packages
+ self.custom_url_packages = custom_url_packages
+ self.bio_conductor_packages = bio_conductor_packages
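+
+# Usage sketch (illustrative only): an RSection pinned to an MRAN snapshot with
+# one CRAN and one GitHub package. The package names are hypothetical examples;
+# RCranPackage and RGitHubPackage are defined later in this module.
+#
+#   r_section = RSection(
+#       r_version="3.6.1",
+#       snapshot_date="2019-04-17",
+#       cran_packages=[RCranPackage(name="ggplot2")],
+#       git_hub_packages=[RGitHubPackage(repository="tidyverse/dplyr")],
+#   )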
+
+
+class ModelEnvironmentDefinitionR(RSection):
+ """Settings for a R environment.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+    :param user_managed: Indicates whether the environment is managed by the user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackage]
+    :param custom_url_packages: The packages from custom URLs.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackage]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ r_version: Optional[str] = None,
+ user_managed: Optional[bool] = None,
+ rscript_path: Optional[str] = None,
+ snapshot_date: Optional[str] = None,
+ cran_packages: Optional[List["RCranPackage"]] = None,
+ git_hub_packages: Optional[List["RGitHubPackage"]] = None,
+ custom_url_packages: Optional[List[str]] = None,
+ bio_conductor_packages: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionR, self).__init__(r_version=r_version, user_managed=user_managed, rscript_path=rscript_path, snapshot_date=snapshot_date, cran_packages=cran_packages, git_hub_packages=git_hub_packages, custom_url_packages=custom_url_packages, bio_conductor_packages=bio_conductor_packages, **kwargs)
+
+
+class ModelEnvironmentDefinitionResponseDocker(ModelDockerSectionResponse):
+ """The definition of a Docker container.
+
+ :param base_image: Base image used for Docker-based runs. Mutually exclusive with
+ BaseDockerfile.
+ :type base_image: str
+ :param base_dockerfile: Base Dockerfile used for Docker-based runs. Mutually exclusive with
+ BaseImage.
+ :type base_dockerfile: str
+ :param base_image_registry: Image registry that contains the base image.
+ :type base_image_registry: ~azure_machine_learning_workspaces.models.ContainerRegistryResponse
+ """
+
+ _attribute_map = {
+ 'base_image': {'key': 'baseImage', 'type': 'str'},
+ 'base_dockerfile': {'key': 'baseDockerfile', 'type': 'str'},
+ 'base_image_registry': {'key': 'baseImageRegistry', 'type': 'ContainerRegistryResponse'},
+ }
+
+ def __init__(
+ self,
+ *,
+ base_image: Optional[str] = None,
+ base_dockerfile: Optional[str] = None,
+ base_image_registry: Optional["ContainerRegistryResponse"] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponseDocker, self).__init__(base_image=base_image, base_dockerfile=base_dockerfile, base_image_registry=base_image_registry, **kwargs)
+
+
+class ModelEnvironmentDefinitionResponsePython(ModelPythonSection):
+ """Settings for a Python environment.
+
+ :param interpreter_path: The python interpreter path to use if an environment build is not
+ required. The path specified gets used to call the user script.
+ :type interpreter_path: str
+ :param user_managed_dependencies: True means that AzureML reuses an existing python
+ environment; False means that AzureML will create a python environment based on the Conda
+ dependencies specification.
+ :type user_managed_dependencies: bool
+ :param conda_dependencies: A JObject containing Conda dependencies.
+ :type conda_dependencies: object
+ :param base_conda_environment:
+ :type base_conda_environment: str
+ """
+
+ _attribute_map = {
+ 'interpreter_path': {'key': 'interpreterPath', 'type': 'str'},
+ 'user_managed_dependencies': {'key': 'userManagedDependencies', 'type': 'bool'},
+ 'conda_dependencies': {'key': 'condaDependencies', 'type': 'object'},
+ 'base_conda_environment': {'key': 'baseCondaEnvironment', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ interpreter_path: Optional[str] = None,
+ user_managed_dependencies: Optional[bool] = None,
+ conda_dependencies: Optional[object] = None,
+ base_conda_environment: Optional[str] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponsePython, self).__init__(interpreter_path=interpreter_path, user_managed_dependencies=user_managed_dependencies, conda_dependencies=conda_dependencies, base_conda_environment=base_conda_environment, **kwargs)
+
+
+class RSectionResponse(msrest.serialization.Model):
+ """RSectionResponse.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+    :param user_managed: Indicates whether the environment is managed by the user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackageResponse]
+    :param custom_url_packages: The packages from custom URLs.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackageResponse]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ r_version: Optional[str] = None,
+ user_managed: Optional[bool] = None,
+ rscript_path: Optional[str] = None,
+ snapshot_date: Optional[str] = None,
+ cran_packages: Optional[List["RCranPackage"]] = None,
+ git_hub_packages: Optional[List["RGitHubPackageResponse"]] = None,
+ custom_url_packages: Optional[List[str]] = None,
+ bio_conductor_packages: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(RSectionResponse, self).__init__(**kwargs)
+ self.r_version = r_version
+ self.user_managed = user_managed
+ self.rscript_path = rscript_path
+ self.snapshot_date = snapshot_date
+ self.cran_packages = cran_packages
+ self.git_hub_packages = git_hub_packages
+ self.custom_url_packages = custom_url_packages
+ self.bio_conductor_packages = bio_conductor_packages
+
+
+class ModelEnvironmentDefinitionResponseR(RSectionResponse):
+ """Settings for a R environment.
+
+ :param r_version: The version of R to be installed.
+ :type r_version: str
+    :param user_managed: Indicates whether the environment is managed by the user or by AzureML.
+ :type user_managed: bool
+ :param rscript_path: The Rscript path to use if an environment build is not required.
+ The path specified gets used to call the user script.
+ :type rscript_path: str
+ :param snapshot_date: Date of MRAN snapshot to use in YYYY-MM-DD format, e.g. "2019-04-17".
+ :type snapshot_date: str
+ :param cran_packages: The CRAN packages to use.
+ :type cran_packages: list[~azure_machine_learning_workspaces.models.RCranPackage]
+ :param git_hub_packages: The packages directly from GitHub.
+ :type git_hub_packages: list[~azure_machine_learning_workspaces.models.RGitHubPackageResponse]
+    :param custom_url_packages: The packages from custom URLs.
+ :type custom_url_packages: list[str]
+ :param bio_conductor_packages: The packages from Bioconductor.
+ :type bio_conductor_packages: list[str]
+ """
+
+ _attribute_map = {
+ 'r_version': {'key': 'rVersion', 'type': 'str'},
+ 'user_managed': {'key': 'userManaged', 'type': 'bool'},
+ 'rscript_path': {'key': 'rscriptPath', 'type': 'str'},
+ 'snapshot_date': {'key': 'snapshotDate', 'type': 'str'},
+ 'cran_packages': {'key': 'cranPackages', 'type': '[RCranPackage]'},
+ 'git_hub_packages': {'key': 'gitHubPackages', 'type': '[RGitHubPackageResponse]'},
+ 'custom_url_packages': {'key': 'customUrlPackages', 'type': '[str]'},
+ 'bio_conductor_packages': {'key': 'bioConductorPackages', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ r_version: Optional[str] = None,
+ user_managed: Optional[bool] = None,
+ rscript_path: Optional[str] = None,
+ snapshot_date: Optional[str] = None,
+ cran_packages: Optional[List["RCranPackage"]] = None,
+ git_hub_packages: Optional[List["RGitHubPackageResponse"]] = None,
+ custom_url_packages: Optional[List[str]] = None,
+ bio_conductor_packages: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponseR, self).__init__(r_version=r_version, user_managed=user_managed, rscript_path=rscript_path, snapshot_date=snapshot_date, cran_packages=cran_packages, git_hub_packages=git_hub_packages, custom_url_packages=custom_url_packages, bio_conductor_packages=bio_conductor_packages, **kwargs)
+
+
+class ModelSparkSection(msrest.serialization.Model):
+ """ModelSparkSection.
+
+    :param repositories: The list of Spark repositories.
+ :type repositories: list[str]
+ :param packages: The Spark packages to use.
+ :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
+ :param precache_packages: Whether to precache the packages.
+ :type precache_packages: bool
+ """
+
+ _attribute_map = {
+ 'repositories': {'key': 'repositories', 'type': '[str]'},
+ 'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
+ 'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ repositories: Optional[List[str]] = None,
+ packages: Optional[List["SparkMavenPackage"]] = None,
+ precache_packages: Optional[bool] = None,
+ **kwargs
+ ):
+ super(ModelSparkSection, self).__init__(**kwargs)
+ self.repositories = repositories
+ self.packages = packages
+ self.precache_packages = precache_packages
+
+
+class ModelEnvironmentDefinitionResponseSpark(ModelSparkSection):
+ """The configuration for a Spark environment.
+
+    :param repositories: The list of Spark repositories.
+ :type repositories: list[str]
+ :param packages: The Spark packages to use.
+ :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
+ :param precache_packages: Whether to precache the packages.
+ :type precache_packages: bool
+ """
+
+ _attribute_map = {
+ 'repositories': {'key': 'repositories', 'type': '[str]'},
+ 'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
+ 'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ repositories: Optional[List[str]] = None,
+ packages: Optional[List["SparkMavenPackage"]] = None,
+ precache_packages: Optional[bool] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionResponseSpark, self).__init__(repositories=repositories, packages=packages, precache_packages=precache_packages, **kwargs)
+
+
+class ModelEnvironmentDefinitionSpark(ModelSparkSection):
+ """The configuration for a Spark environment.
+
+    :param repositories: The list of Spark repositories.
+ :type repositories: list[str]
+ :param packages: The Spark packages to use.
+ :type packages: list[~azure_machine_learning_workspaces.models.SparkMavenPackage]
+ :param precache_packages: Whether to precache the packages.
+ :type precache_packages: bool
+ """
+
+ _attribute_map = {
+ 'repositories': {'key': 'repositories', 'type': '[str]'},
+ 'packages': {'key': 'packages', 'type': '[SparkMavenPackage]'},
+ 'precache_packages': {'key': 'precachePackages', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ repositories: Optional[List[str]] = None,
+ packages: Optional[List["SparkMavenPackage"]] = None,
+ precache_packages: Optional[bool] = None,
+ **kwargs
+ ):
+ super(ModelEnvironmentDefinitionSpark, self).__init__(repositories=repositories, packages=packages, precache_packages=precache_packages, **kwargs)
+
+
+class NodeStateCounts(msrest.serialization.Model):
+ """Counts of various compute node states on the amlCompute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar idle_node_count: Number of compute nodes in idle state.
+ :vartype idle_node_count: int
+ :ivar running_node_count: Number of compute nodes which are running jobs.
+ :vartype running_node_count: int
+ :ivar preparing_node_count: Number of compute nodes which are being prepared.
+ :vartype preparing_node_count: int
+ :ivar unusable_node_count: Number of compute nodes which are in unusable state.
+ :vartype unusable_node_count: int
+ :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute.
+ :vartype leaving_node_count: int
+ :ivar preempted_node_count: Number of compute nodes which are in preempted state.
+ :vartype preempted_node_count: int
+ """
+
+ _validation = {
+ 'idle_node_count': {'readonly': True},
+ 'running_node_count': {'readonly': True},
+ 'preparing_node_count': {'readonly': True},
+ 'unusable_node_count': {'readonly': True},
+ 'leaving_node_count': {'readonly': True},
+ 'preempted_node_count': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'},
+ 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'},
+ 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'},
+ 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'},
+ 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'},
+ 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NodeStateCounts, self).__init__(**kwargs)
+ self.idle_node_count = None
+ self.running_node_count = None
+ self.preparing_node_count = None
+ self.unusable_node_count = None
+ self.leaving_node_count = None
+ self.preempted_node_count = None
+
+
+class NotebookAccessTokenResult(msrest.serialization.Model):
+ """NotebookAccessTokenResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar notebook_resource_id:
+ :vartype notebook_resource_id: str
+ :ivar host_name:
+ :vartype host_name: str
+ :ivar public_dns:
+ :vartype public_dns: str
+ :ivar access_token:
+ :vartype access_token: str
+ :ivar token_type:
+ :vartype token_type: str
+ :ivar expires_in:
+ :vartype expires_in: int
+ :ivar refresh_token:
+ :vartype refresh_token: str
+ :ivar scope:
+ :vartype scope: str
+ """
+
+ _validation = {
+ 'notebook_resource_id': {'readonly': True},
+ 'host_name': {'readonly': True},
+ 'public_dns': {'readonly': True},
+ 'access_token': {'readonly': True},
+ 'token_type': {'readonly': True},
+ 'expires_in': {'readonly': True},
+ 'refresh_token': {'readonly': True},
+ 'scope': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'},
+ 'host_name': {'key': 'hostName', 'type': 'str'},
+ 'public_dns': {'key': 'publicDns', 'type': 'str'},
+ 'access_token': {'key': 'accessToken', 'type': 'str'},
+ 'token_type': {'key': 'tokenType', 'type': 'str'},
+ 'expires_in': {'key': 'expiresIn', 'type': 'int'},
+ 'refresh_token': {'key': 'refreshToken', 'type': 'str'},
+ 'scope': {'key': 'scope', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(NotebookAccessTokenResult, self).__init__(**kwargs)
+ self.notebook_resource_id = None
+ self.host_name = None
+ self.public_dns = None
+ self.access_token = None
+ self.token_type = None
+ self.expires_in = None
+ self.refresh_token = None
+ self.scope = None
+
+
+class NotebookPreparationError(msrest.serialization.Model):
+ """NotebookPreparationError.
+
+ :param error_message:
+ :type error_message: str
+ :param status_code:
+ :type status_code: int
+ """
+
+ _attribute_map = {
+ 'error_message': {'key': 'errorMessage', 'type': 'str'},
+ 'status_code': {'key': 'statusCode', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ *,
+ error_message: Optional[str] = None,
+ status_code: Optional[int] = None,
+ **kwargs
+ ):
+ super(NotebookPreparationError, self).__init__(**kwargs)
+ self.error_message = error_message
+ self.status_code = status_code
+
+
+class NotebookResourceInfo(msrest.serialization.Model):
+ """NotebookResourceInfo.
+
+ :param fqdn:
+ :type fqdn: str
+    :param resource_id: The data plane resourceId used to initialize the notebook component.
+    :type resource_id: str
+    :param notebook_preparation_error: The error that occurs when preparing the notebook.
+ :type notebook_preparation_error:
+ ~azure_machine_learning_workspaces.models.NotebookPreparationError
+ """
+
+ _attribute_map = {
+ 'fqdn': {'key': 'fqdn', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'},
+ }
+
+ def __init__(
+ self,
+ *,
+ fqdn: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ notebook_preparation_error: Optional["NotebookPreparationError"] = None,
+ **kwargs
+ ):
+ super(NotebookResourceInfo, self).__init__(**kwargs)
+ self.fqdn = fqdn
+ self.resource_id = resource_id
+ self.notebook_preparation_error = notebook_preparation_error
+
+
+class Operation(msrest.serialization.Model):
+ """Azure Machine Learning workspace REST API operation.
+
+ :param name: Operation name: {provider}/{resource}/{operation}.
+ :type name: str
+ :param display: Display name of operation.
+ :type display: ~azure_machine_learning_workspaces.models.OperationDisplay
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'display': {'key': 'display', 'type': 'OperationDisplay'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ display: Optional["OperationDisplay"] = None,
+ **kwargs
+ ):
+ super(Operation, self).__init__(**kwargs)
+ self.name = name
+ self.display = display
+
+
+class OperationDisplay(msrest.serialization.Model):
+ """Display name of operation.
+
+ :param provider: The resource provider name: Microsoft.MachineLearningExperimentation.
+ :type provider: str
+ :param resource: The resource on which the operation is performed.
+ :type resource: str
+ :param operation: The operation that users can perform.
+ :type operation: str
+ :param description: The description for the operation.
+ :type description: str
+ """
+
+ _attribute_map = {
+ 'provider': {'key': 'provider', 'type': 'str'},
+ 'resource': {'key': 'resource', 'type': 'str'},
+ 'operation': {'key': 'operation', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ provider: Optional[str] = None,
+ resource: Optional[str] = None,
+ operation: Optional[str] = None,
+ description: Optional[str] = None,
+ **kwargs
+ ):
+ super(OperationDisplay, self).__init__(**kwargs)
+ self.provider = provider
+ self.resource = resource
+ self.operation = operation
+ self.description = description
+
+
+class OperationListResult(msrest.serialization.Model):
+ """An array of operations supported by the resource provider.
+
+ :param value: List of AML workspace operations supported by the AML workspace resource
+ provider.
+ :type value: list[~azure_machine_learning_workspaces.models.Operation]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Operation]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["Operation"]] = None,
+ **kwargs
+ ):
+ super(OperationListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class PaginatedComputeResourcesList(msrest.serialization.Model):
+ """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope.
+
+ :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope.
+ :type value: list[~azure_machine_learning_workspaces.models.ComputeResource]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ComputeResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["ComputeResource"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(PaginatedComputeResourcesList, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class PaginatedServiceList(msrest.serialization.Model):
+ """Paginated list of Machine Learning service objects wrapped in ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: An array of Machine Learning service objects wrapped in ARM resource envelope.
+ :vartype value: list[~azure_machine_learning_workspaces.models.ServiceResource]
+ :ivar next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[ServiceResource]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PaginatedServiceList, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class PaginatedWorkspaceConnectionsList(msrest.serialization.Model):
+ """Paginated list of Workspace connection objects.
+
+ :param value: An array of Workspace connection objects.
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection]
+ :param next_link: A continuation link (absolute URI) to the next page of results in the list.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceConnection]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["WorkspaceConnection"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class Password(msrest.serialization.Model):
+ """Password.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name:
+ :vartype name: str
+ :ivar value:
+ :vartype value: str
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Password, self).__init__(**kwargs)
+ self.name = None
+ self.value = None
+
+
+class PersonalComputeInstanceSettings(msrest.serialization.Model):
+ """Settings for a personal compute instance.
+
+ :param assigned_user: A user explicitly assigned to a personal compute instance.
+ :type assigned_user: ~azure_machine_learning_workspaces.models.AssignedUser
+ """
+
+ _attribute_map = {
+ 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'},
+ }
+
+ def __init__(
+ self,
+ *,
+ assigned_user: Optional["AssignedUser"] = None,
+ **kwargs
+ ):
+ super(PersonalComputeInstanceSettings, self).__init__(**kwargs)
+ self.assigned_user = assigned_user
+
+
+class PrivateEndpoint(msrest.serialization.Model):
+ """The Private Endpoint resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: The ARM identifier for Private Endpoint.
+ :vartype id: str
+ :ivar subnet_arm_id: The ARM identifier for Subnet resource that private endpoint links to.
+ :vartype subnet_arm_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'subnet_arm_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(PrivateEndpoint, self).__init__(**kwargs)
+ self.id = None
+ self.subnet_arm_id = None
+
+
+class PrivateEndpointConnection(Resource):
+ """The Private Endpoint Connection resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+    :param private_endpoint: The resource of the private endpoint.
+ :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint
+ :param private_link_service_connection_state: A collection of information about the state of
+ the connection between service consumer and provider.
+ :type private_link_service_connection_state:
+ ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState
+ :ivar provisioning_state: The provisioning state of the private endpoint connection resource.
+ Possible values include: "Succeeded", "Creating", "Deleting", "Failed".
+ :vartype provisioning_state: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'},
+ 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ private_endpoint: Optional["PrivateEndpoint"] = None,
+ private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None,
+ **kwargs
+ ):
+ super(PrivateEndpointConnection, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+ self.private_endpoint = private_endpoint
+ self.private_link_service_connection_state = private_link_service_connection_state
+ self.provisioning_state = None
+
+
+class PrivateLinkResource(Resource):
+ """A private link resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar group_id: The private link resource group id.
+ :vartype group_id: str
+ :ivar required_members: The private link resource required member names.
+ :vartype required_members: list[str]
+    :param required_zone_names: The private link DNS zone names required for the private link
+     resource.
+ :type required_zone_names: list[str]
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'group_id': {'readonly': True},
+ 'required_members': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'},
+ 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ required_zone_names: Optional[List[str]] = None,
+ **kwargs
+ ):
+ super(PrivateLinkResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+ self.group_id = None
+ self.required_members = None
+ self.required_zone_names = required_zone_names
+
+
+class PrivateLinkResourceListResult(msrest.serialization.Model):
+ """A list of private link resources.
+
+ :param value: Array of private link resources.
+ :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource]
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[PrivateLinkResource]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["PrivateLinkResource"]] = None,
+ **kwargs
+ ):
+ super(PrivateLinkResourceListResult, self).__init__(**kwargs)
+ self.value = value
+
+
+class PrivateLinkServiceConnectionState(msrest.serialization.Model):
+ """A collection of information about the state of the connection between service consumer and provider.
+
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ :param description: The reason for approval/rejection of the connection.
+ :type description: str
+ :param actions_required: A message indicating if changes on the service provider require any
+ updates on the consumer.
+ :type actions_required: str
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'actions_required': {'key': 'actionsRequired', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+ description: Optional[str] = None,
+ actions_required: Optional[str] = None,
+ **kwargs
+ ):
+ super(PrivateLinkServiceConnectionState, self).__init__(**kwargs)
+ self.status = status
+ self.description = description
+ self.actions_required = actions_required
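+
+# Usage sketch (illustrative only): approving a private endpoint connection.
+# The description text is a hypothetical placeholder; ``status`` accepts either
+# the PrivateEndpointServiceConnectionStatus enum or its string value.
+#
+#   state = PrivateLinkServiceConnectionState(
+#       status="Approved",
+#       description="Approved by the workspace administrator",
+#   )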
+
+
+class QuotaBaseProperties(msrest.serialization.Model):
+ """The properties for Quota update or retrieval.
+
+ :param id: Specifies the resource ID.
+ :type id: str
+ :param type: Specifies the resource type.
+ :type type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :param unit: An enum describing the unit of quota measurement. Possible values include:
+ "Count".
+ :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: Optional[str] = None,
+ type: Optional[str] = None,
+ limit: Optional[int] = None,
+ unit: Optional[Union[str, "QuotaUnit"]] = None,
+ **kwargs
+ ):
+ super(QuotaBaseProperties, self).__init__(**kwargs)
+ self.id = id
+ self.type = type
+ self.limit = limit
+ self.unit = unit
+
+
+class QuotaUpdateParameters(msrest.serialization.Model):
+ """Quota update parameters.
+
+    :param value: The list of quota properties to update.
+ :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties]
+ :param location: Region of workspace quota to be updated.
+ :type location: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'},
+ 'location': {'key': 'location', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["QuotaBaseProperties"]] = None,
+ location: Optional[str] = None,
+ **kwargs
+ ):
+ super(QuotaUpdateParameters, self).__init__(**kwargs)
+ self.value = value
+ self.location = location
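+
+# Usage sketch (illustrative only): requesting a new quota limit for one VM
+# family. The resource ID below is a hypothetical placeholder; only ``limit``,
+# ``unit`` and ``location`` follow the fields documented above.
+#
+#   quota_update = QuotaUpdateParameters(
+#       location="eastus",
+#       value=[
+#           QuotaBaseProperties(
+#               id="/subscriptions/<subscription-id>/providers/Microsoft.MachineLearningServices/locations/eastus/quotas/<vm-family>",
+#               limit=48,
+#               unit="Count",
+#           )
+#       ],
+#   )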
+
+
+class RCranPackage(msrest.serialization.Model):
+ """RCranPackage.
+
+ :param name: The package name.
+ :type name: str
+ :param repository: The repository name.
+ :type repository: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'repository': {'key': 'repository', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ repository: Optional[str] = None,
+ **kwargs
+ ):
+ super(RCranPackage, self).__init__(**kwargs)
+ self.name = name
+ self.repository = repository
+
+
+class RegistryListCredentialsResult(msrest.serialization.Model):
+ """RegistryListCredentialsResult.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location:
+ :vartype location: str
+ :ivar username:
+ :vartype username: str
+ :param passwords:
+ :type passwords: list[~azure_machine_learning_workspaces.models.Password]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'username': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'username': {'key': 'username', 'type': 'str'},
+ 'passwords': {'key': 'passwords', 'type': '[Password]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ passwords: Optional[List["Password"]] = None,
+ **kwargs
+ ):
+ super(RegistryListCredentialsResult, self).__init__(**kwargs)
+ self.location = None
+ self.username = None
+ self.passwords = passwords
+
+
+class ResourceId(msrest.serialization.Model):
+ """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. The ID of the resource.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str,
+ **kwargs
+ ):
+ super(ResourceId, self).__init__(**kwargs)
+ self.id = id
+
+
+class ResourceName(msrest.serialization.Model):
+ """The Resource Name.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class ResourceQuota(msrest.serialization.Model):
+ """The quota assigned to a resource.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar aml_workspace_location: Region of the AML workspace in the id.
+ :vartype aml_workspace_location: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar name: Name of the resource.
+ :vartype name: ~azure_machine_learning_workspaces.models.ResourceName
+ :ivar limit: The maximum permitted quota of the resource.
+ :vartype limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'aml_workspace_location': {'readonly': True},
+ 'type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'ResourceName'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceQuota, self).__init__(**kwargs)
+ self.id = None
+ self.aml_workspace_location = None
+ self.type = None
+ self.name = None
+ self.limit = None
+ self.unit = None
+
+
+class ResourceSkuLocationInfo(msrest.serialization.Model):
+ """ResourceSkuLocationInfo.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar location: Location of the SKU.
+ :vartype location: str
+ :ivar zones: List of availability zones where the SKU is supported.
+ :vartype zones: list[str]
+ :ivar zone_details: Details of capabilities available to a SKU in specific zones.
+ :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails]
+ """
+
+ _validation = {
+ 'location': {'readonly': True},
+ 'zones': {'readonly': True},
+ 'zone_details': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'location': {'key': 'location', 'type': 'str'},
+ 'zones': {'key': 'zones', 'type': '[str]'},
+ 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuLocationInfo, self).__init__(**kwargs)
+ self.location = None
+ self.zones = None
+ self.zone_details = None
+
+
+class ResourceSkuZoneDetails(msrest.serialization.Model):
+ """Describes The zonal capabilities of a SKU.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The set of zones that the SKU is available in with the specified capabilities.
+ :vartype name: list[str]
+ :ivar capabilities: A list of capabilities that are available for the SKU in the specified list
+ of zones.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': '[str]'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ResourceSkuZoneDetails, self).__init__(**kwargs)
+ self.name = None
+ self.capabilities = None
+
+
+class Restriction(msrest.serialization.Model):
+ """The restriction because of which SKU cannot be used.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar type: The type of restrictions. As of now, the only possible value for this is location.
+    :vartype type: str
+    :ivar values: The value of restrictions. If the restriction type is set to location, this would
+     be the different locations where the SKU is restricted.
+ :vartype values: list[str]
+ :param reason_code: The reason for the restriction. Possible values include: "NotSpecified",
+ "NotAvailableForRegion", "NotAvailableForSubscription".
+ :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'values': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'type': {'key': 'type', 'type': 'str'},
+ 'values': {'key': 'values', 'type': '[str]'},
+ 'reason_code': {'key': 'reasonCode', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ reason_code: Optional[Union[str, "ReasonCode"]] = None,
+ **kwargs
+ ):
+ super(Restriction, self).__init__(**kwargs)
+ self.type = None
+ self.values = None
+ self.reason_code = reason_code
+
+
+class RGitHubPackage(msrest.serialization.Model):
+ """RGitHubPackage.
+
+ :param repository: Repository address in the format username/repo[/subdir][@ref|#pull].
+ :type repository: str
+ :param auth_token: Personal access token to install from a private repo.
+ :type auth_token: str
+ """
+
+ _attribute_map = {
+ 'repository': {'key': 'repository', 'type': 'str'},
+ 'auth_token': {'key': 'authToken', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ repository: Optional[str] = None,
+ auth_token: Optional[str] = None,
+ **kwargs
+ ):
+ super(RGitHubPackage, self).__init__(**kwargs)
+ self.repository = repository
+ self.auth_token = auth_token
+
+
+class RGitHubPackageResponse(msrest.serialization.Model):
+ """RGitHubPackageResponse.
+
+ :param repository: Repository address in the format username/repo[/subdir][@ref|#pull].
+ :type repository: str
+ """
+
+ _attribute_map = {
+ 'repository': {'key': 'repository', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ repository: Optional[str] = None,
+ **kwargs
+ ):
+ super(RGitHubPackageResponse, self).__init__(**kwargs)
+ self.repository = repository
+
+
+class ScaleSettings(msrest.serialization.Model):
+ """scale settings for AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param max_node_count: Required. Max number of nodes to use.
+ :type max_node_count: int
+ :param min_node_count: Min number of nodes to use.
+ :type min_node_count: int
+    :param node_idle_time_before_scale_down: Node idle time before scaling down amlCompute. This
+     string needs to be in the RFC format.
+ :type node_idle_time_before_scale_down: ~datetime.timedelta
+ """
+
+ _validation = {
+ 'max_node_count': {'required': True},
+ }
+
+ _attribute_map = {
+ 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'},
+ 'min_node_count': {'key': 'minNodeCount', 'type': 'int'},
+ 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'},
+ }
+
+ def __init__(
+ self,
+ *,
+ max_node_count: int,
+ min_node_count: Optional[int] = 0,
+ node_idle_time_before_scale_down: Optional[datetime.timedelta] = None,
+ **kwargs
+ ):
+ super(ScaleSettings, self).__init__(**kwargs)
+ self.max_node_count = max_node_count
+ self.min_node_count = min_node_count
+ self.node_idle_time_before_scale_down = node_idle_time_before_scale_down
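+
+# Illustrative usage sketch (not generated code): constructing scale settings for an AmlCompute
+# cluster; max_node_count is required and the idle timeout is serialized as an ISO 8601 duration.
+#
+#   import datetime
+#   scale = ScaleSettings(
+#       max_node_count=4,
+#       min_node_count=0,
+#       node_idle_time_before_scale_down=datetime.timedelta(minutes=2),
+#   )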
+
+
+class ScriptReference(msrest.serialization.Model):
+ """Script reference.
+
+ :param script_source: The storage source of the script: inline, workspace.
+ :type script_source: str
+ :param script_data: The location of scripts in the mounted volume.
+ :type script_data: str
+ :param script_arguments: Optional command line arguments passed to the script to run.
+ :type script_arguments: str
+ :param timeout: Optional time period passed to timeout command.
+ :type timeout: str
+ """
+
+ _attribute_map = {
+ 'script_source': {'key': 'scriptSource', 'type': 'str'},
+ 'script_data': {'key': 'scriptData', 'type': 'str'},
+ 'script_arguments': {'key': 'scriptArguments', 'type': 'str'},
+ 'timeout': {'key': 'timeout', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ script_source: Optional[str] = None,
+ script_data: Optional[str] = None,
+ script_arguments: Optional[str] = None,
+ timeout: Optional[str] = None,
+ **kwargs
+ ):
+ super(ScriptReference, self).__init__(**kwargs)
+ self.script_source = script_source
+ self.script_data = script_data
+ self.script_arguments = script_arguments
+ self.timeout = timeout
+
+
+class ScriptsToExecute(msrest.serialization.Model):
+ """Customized setup scripts.
+
+ :param startup_script: Script that's run every time the machine starts.
+ :type startup_script: ~azure_machine_learning_workspaces.models.ScriptReference
+    :param creation_script: Script that's run only once during provisioning of the compute.
+ :type creation_script: ~azure_machine_learning_workspaces.models.ScriptReference
+ """
+
+ _attribute_map = {
+ 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'},
+ 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'},
+ }
+
+ def __init__(
+ self,
+ *,
+ startup_script: Optional["ScriptReference"] = None,
+ creation_script: Optional["ScriptReference"] = None,
+ **kwargs
+ ):
+ super(ScriptsToExecute, self).__init__(**kwargs)
+ self.startup_script = startup_script
+ self.creation_script = creation_script
+
+
+class ServiceManagedResourcesSettings(msrest.serialization.Model):
+ """ServiceManagedResourcesSettings.
+
+    :param cosmos_db: The settings for the service-managed Cosmos DB account.
+ :type cosmos_db: ~azure_machine_learning_workspaces.models.CosmosDbSettings
+ """
+
+ _attribute_map = {
+ 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'},
+ }
+
+ def __init__(
+ self,
+ *,
+ cosmos_db: Optional["CosmosDbSettings"] = None,
+ **kwargs
+ ):
+ super(ServiceManagedResourcesSettings, self).__init__(**kwargs)
+ self.cosmos_db = cosmos_db
+
+
+class ServicePrincipalCredentials(msrest.serialization.Model):
+ """Service principal credentials.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param client_id: Required. Client Id.
+ :type client_id: str
+ :param client_secret: Required. Client secret.
+ :type client_secret: str
+ """
+
+ _validation = {
+ 'client_id': {'required': True},
+ 'client_secret': {'required': True},
+ }
+
+ _attribute_map = {
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ 'client_secret': {'key': 'clientSecret', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ client_id: str,
+ client_secret: str,
+ **kwargs
+ ):
+ super(ServicePrincipalCredentials, self).__init__(**kwargs)
+ self.client_id = client_id
+ self.client_secret = client_secret
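+
+# Illustrative usage sketch (not generated code): both fields are required; the values shown are
+# placeholders, and a real client secret should come from a secure store rather than source code.
+#
+#   sp_creds = ServicePrincipalCredentials(
+#       client_id="00000000-0000-0000-0000-000000000000",
+#       client_secret="<service-principal-secret>",
+#   )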
+
+
+class ServiceResource(Resource):
+ """Machine Learning service object wrapped into ARM resource envelope.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :param properties: Service properties.
+ :type properties: ~azure_machine_learning_workspaces.models.ServiceResponseBase
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'properties': {'key': 'properties', 'type': 'ServiceResponseBase'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ properties: Optional["ServiceResponseBase"] = None,
+ **kwargs
+ ):
+ super(ServiceResource, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+ self.properties = properties
+
+
+class ServiceResponseBaseError(MachineLearningServiceError):
+ """The error details.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar error: The error response.
+ :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse
+ """
+
+ _validation = {
+ 'error': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'error': {'key': 'error', 'type': 'ErrorResponse'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ServiceResponseBaseError, self).__init__(**kwargs)
+
+
+class SetupScripts(msrest.serialization.Model):
+ """Details of customized scripts to execute for setting up the cluster.
+
+ :param scripts: Customized setup scripts.
+ :type scripts: ~azure_machine_learning_workspaces.models.ScriptsToExecute
+ """
+
+ _attribute_map = {
+ 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'},
+ }
+
+ def __init__(
+ self,
+ *,
+ scripts: Optional["ScriptsToExecute"] = None,
+ **kwargs
+ ):
+ super(SetupScripts, self).__init__(**kwargs)
+ self.scripts = scripts
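+
+# Illustrative usage sketch (not generated code): nesting the script models defined above to
+# describe a startup script; the script path, arguments, and timeout are placeholders.
+#
+#   setup = SetupScripts(
+#       scripts=ScriptsToExecute(
+#           startup_script=ScriptReference(
+#               script_source="workspace",
+#               script_data="scripts/startup.sh",
+#               script_arguments="--verbose",
+#               timeout="5m",
+#           )
+#       )
+#   )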
+
+
+class SharedPrivateLinkResource(msrest.serialization.Model):
+ """SharedPrivateLinkResource.
+
+ :param name: Unique name of the private link.
+ :type name: str
+    :param private_link_resource_id: The resource ID that the private link links to.
+ :type private_link_resource_id: str
+ :param group_id: The private link resource group id.
+ :type group_id: str
+ :param request_message: Request message.
+ :type request_message: str
+ :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner
+ of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected",
+ "Timeout".
+ :type status: str or
+ ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'},
+ 'group_id': {'key': 'properties.groupId', 'type': 'str'},
+ 'request_message': {'key': 'properties.requestMessage', 'type': 'str'},
+ 'status': {'key': 'properties.status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ private_link_resource_id: Optional[str] = None,
+ group_id: Optional[str] = None,
+ request_message: Optional[str] = None,
+ status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None,
+ **kwargs
+ ):
+ super(SharedPrivateLinkResource, self).__init__(**kwargs)
+ self.name = name
+ self.private_link_resource_id = private_link_resource_id
+ self.group_id = group_id
+ self.request_message = request_message
+ self.status = status
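+
+# Illustrative usage sketch (not generated code): all identifiers below are placeholders for a
+# private link target such as a database or storage account.
+#
+#   spl = SharedPrivateLinkResource(
+#       name="<link-name>",
+#       private_link_resource_id="<arm-id-of-the-target-resource>",
+#       group_id="<group-id>",
+#       request_message="<request message>",
+#       status="Approved",
+#   )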
+
+
+class Sku(msrest.serialization.Model):
+ """Sku of the resource.
+
+ :param name: Name of the sku.
+ :type name: str
+ :param tier: Tier of the sku like Basic or Enterprise.
+ :type tier: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ tier: Optional[str] = None,
+ **kwargs
+ ):
+ super(Sku, self).__init__(**kwargs)
+ self.name = name
+ self.tier = tier
+
+
+class SkuCapability(msrest.serialization.Model):
+ """Features/user capabilities associated with the sku.
+
+ :param name: Capability/Feature ID.
+ :type name: str
+ :param value: Details about the feature/capability.
+ :type value: str
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'value': {'key': 'value', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ value: Optional[str] = None,
+ **kwargs
+ ):
+ super(SkuCapability, self).__init__(**kwargs)
+ self.name = name
+ self.value = value
+
+
+class SkuListResult(msrest.serialization.Model):
+ """List of skus with features.
+
+ :param value:
+ :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku]
+ :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this
+ URI to fetch the next page of Workspace Skus.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[WorkspaceSku]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["WorkspaceSku"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(SkuListResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class SparkMavenPackage(msrest.serialization.Model):
+ """SparkMavenPackage.
+
+ :param group:
+ :type group: str
+ :param artifact:
+ :type artifact: str
+ :param version:
+ :type version: str
+ """
+
+ _attribute_map = {
+ 'group': {'key': 'group', 'type': 'str'},
+ 'artifact': {'key': 'artifact', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ group: Optional[str] = None,
+ artifact: Optional[str] = None,
+ version: Optional[str] = None,
+ **kwargs
+ ):
+ super(SparkMavenPackage, self).__init__(**kwargs)
+ self.group = group
+ self.artifact = artifact
+ self.version = version
+
+
+class SslConfiguration(msrest.serialization.Model):
+ """The ssl configuration for scoring.
+
+ :param status: Enable or disable ssl for scoring. Possible values include: "Disabled",
+ "Enabled", "Auto".
+ :type status: str or ~azure_machine_learning_workspaces.models.SslConfigurationStatus
+ :param cert: Cert data.
+ :type cert: str
+ :param key: Key data.
+ :type key: str
+ :param cname: CNAME of the cert.
+ :type cname: str
+ :param leaf_domain_label: Leaf domain label of public endpoint.
+ :type leaf_domain_label: str
+ :param overwrite_existing_domain: Indicates whether to overwrite existing domain label.
+ :type overwrite_existing_domain: bool
+ """
+
+ _attribute_map = {
+ 'status': {'key': 'status', 'type': 'str'},
+ 'cert': {'key': 'cert', 'type': 'str'},
+ 'key': {'key': 'key', 'type': 'str'},
+ 'cname': {'key': 'cname', 'type': 'str'},
+ 'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'},
+ 'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ status: Optional[Union[str, "SslConfigurationStatus"]] = None,
+ cert: Optional[str] = None,
+ key: Optional[str] = None,
+ cname: Optional[str] = None,
+ leaf_domain_label: Optional[str] = None,
+ overwrite_existing_domain: Optional[bool] = None,
+ **kwargs
+ ):
+ super(SslConfiguration, self).__init__(**kwargs)
+ self.status = status
+ self.cert = cert
+ self.key = key
+ self.cname = cname
+ self.leaf_domain_label = leaf_domain_label
+ self.overwrite_existing_domain = overwrite_existing_domain
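+
+# Illustrative usage sketch (not generated code): enabling SSL using a leaf domain label instead
+# of providing cert/key/cname explicitly; values are placeholders.
+#
+#   ssl = SslConfiguration(
+#       status="Enabled",
+#       leaf_domain_label="contoso-scoring",
+#       overwrite_existing_domain=True,
+#   )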
+
+
+class SynapseSparkPoolProperties(msrest.serialization.Model):
+ """Properties specific to Synapse Spark pools.
+
+    :param properties: Synapse Spark pool properties.
+ :type properties:
+ ~azure_machine_learning_workspaces.models.SynapseSparkPoolPropertiesautogenerated
+ """
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'SynapseSparkPoolPropertiesautogenerated'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["SynapseSparkPoolPropertiesautogenerated"] = None,
+ **kwargs
+ ):
+ super(SynapseSparkPoolProperties, self).__init__(**kwargs)
+ self.properties = properties
+
+
+class SynapseSpark(Compute, SynapseSparkPoolProperties):
+ """A SynapseSpark compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param properties: Synapse Spark pool properties.
+ :type properties:
+ ~azure_machine_learning_workspaces.models.SynapseSparkPoolPropertiesautogenerated
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+    :ivar provisioning_state: The provisioning state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+     brought from outside if true, or provisioned by the machine learning service if false.
+    :vartype is_attached_compute: bool
+    :param disable_local_auth: Opt out of local authentication and ensure customers use only MSI
+     and AAD for authentication.
+    :type disable_local_auth: bool
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'properties': {'key': 'properties', 'type': 'SynapseSparkPoolPropertiesautogenerated'},
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ properties: Optional["SynapseSparkPoolPropertiesautogenerated"] = None,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ **kwargs
+ ):
+ super(SynapseSpark, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs)
+ self.properties = properties
+        self.compute_type = 'SynapseSpark' # type: str
+ self.compute_location = compute_location
+ self.provisioning_state = None
+ self.description = description
+ self.created_on = None
+ self.modified_on = None
+ self.resource_id = resource_id
+ self.provisioning_errors = None
+ self.is_attached_compute = None
+ self.disable_local_auth = disable_local_auth
+
+
+class SynapseSparkPoolPropertiesautogenerated(msrest.serialization.Model):
+ """AKS properties.
+
+ :param auto_scale_properties: Auto scale properties.
+ :type auto_scale_properties: ~azure_machine_learning_workspaces.models.AutoScaleProperties
+ :param auto_pause_properties: Auto pause properties.
+ :type auto_pause_properties: ~azure_machine_learning_workspaces.models.AutoPauseProperties
+ :param spark_version: Spark version.
+ :type spark_version: str
+ :param node_count: The number of compute nodes currently assigned to the compute.
+ :type node_count: int
+ :param node_size: Node size.
+ :type node_size: str
+ :param node_size_family: Node size family.
+ :type node_size_family: str
+ :param subscription_id: Azure subscription identifier.
+ :type subscription_id: str
+    :param resource_group: Name of the resource group in which the workspace is located.
+ :type resource_group: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param pool_name: Pool name.
+ :type pool_name: str
+ """
+
+ _attribute_map = {
+ 'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'},
+ 'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'},
+ 'spark_version': {'key': 'sparkVersion', 'type': 'str'},
+ 'node_count': {'key': 'nodeCount', 'type': 'int'},
+ 'node_size': {'key': 'nodeSize', 'type': 'str'},
+ 'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'},
+ 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+ 'resource_group': {'key': 'resourceGroup', 'type': 'str'},
+ 'workspace_name': {'key': 'workspaceName', 'type': 'str'},
+ 'pool_name': {'key': 'poolName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ auto_scale_properties: Optional["AutoScaleProperties"] = None,
+ auto_pause_properties: Optional["AutoPauseProperties"] = None,
+ spark_version: Optional[str] = None,
+ node_count: Optional[int] = None,
+ node_size: Optional[str] = None,
+ node_size_family: Optional[str] = None,
+ subscription_id: Optional[str] = None,
+ resource_group: Optional[str] = None,
+ workspace_name: Optional[str] = None,
+ pool_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(SynapseSparkPoolPropertiesautogenerated, self).__init__(**kwargs)
+ self.auto_scale_properties = auto_scale_properties
+ self.auto_pause_properties = auto_pause_properties
+ self.spark_version = spark_version
+ self.node_count = node_count
+ self.node_size = node_size
+ self.node_size_family = node_size_family
+ self.subscription_id = subscription_id
+ self.resource_group = resource_group
+ self.workspace_name = workspace_name
+ self.pool_name = pool_name
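+
+# Illustrative usage sketch (not generated code): attaching an existing Synapse Spark pool by
+# wrapping its properties in the SynapseSpark compute model defined above; all identifiers are
+# placeholders.
+#
+#   pool_props = SynapseSparkPoolPropertiesautogenerated(
+#       subscription_id="<subscription-id>",
+#       resource_group="<resource-group>",
+#       workspace_name="<synapse-workspace>",
+#       pool_name="<spark-pool>",
+#   )
+#   compute = SynapseSpark(
+#       properties=pool_props,
+#       resource_id="<arm-id-of-the-synapse-spark-pool>",
+#       description="Attached Synapse Spark pool",
+#   )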
+
+
+class SystemData(msrest.serialization.Model):
+ """Read only system data.
+
+ :param created_by: An identifier for the identity that created the resource.
+ :type created_by: str
+ :param created_by_type: The type of identity that created the resource. Possible values
+ include: "User", "Application", "ManagedIdentity", "Key".
+ :type created_by_type: str or ~azure_machine_learning_workspaces.models.IdentityType
+ :param created_at: The timestamp of resource creation (UTC).
+ :type created_at: ~datetime.datetime
+ :param last_modified_by: An identifier for the identity that last modified the resource.
+ :type last_modified_by: str
+ :param last_modified_by_type: The type of identity that last modified the resource. Possible
+ values include: "User", "Application", "ManagedIdentity", "Key".
+ :type last_modified_by_type: str or ~azure_machine_learning_workspaces.models.IdentityType
+ :param last_modified_at: The timestamp of resource last modification (UTC).
+ :type last_modified_at: ~datetime.datetime
+ """
+
+ _attribute_map = {
+ 'created_by': {'key': 'createdBy', 'type': 'str'},
+ 'created_by_type': {'key': 'createdByType', 'type': 'str'},
+ 'created_at': {'key': 'createdAt', 'type': 'iso-8601'},
+ 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'},
+ 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'},
+ 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ *,
+ created_by: Optional[str] = None,
+ created_by_type: Optional[Union[str, "IdentityType"]] = None,
+ created_at: Optional[datetime.datetime] = None,
+ last_modified_by: Optional[str] = None,
+ last_modified_by_type: Optional[Union[str, "IdentityType"]] = None,
+ last_modified_at: Optional[datetime.datetime] = None,
+ **kwargs
+ ):
+ super(SystemData, self).__init__(**kwargs)
+ self.created_by = created_by
+ self.created_by_type = created_by_type
+ self.created_at = created_at
+ self.last_modified_by = last_modified_by
+ self.last_modified_by_type = last_modified_by_type
+ self.last_modified_at = last_modified_at
+
+
+class SystemService(msrest.serialization.Model):
+ """A system service running on a compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar system_service_type: The type of this system service.
+ :vartype system_service_type: str
+ :ivar public_ip_address: Public IP address.
+ :vartype public_ip_address: str
+ :ivar version: The version for this type.
+ :vartype version: str
+ """
+
+ _validation = {
+ 'system_service_type': {'readonly': True},
+ 'public_ip_address': {'readonly': True},
+ 'version': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'system_service_type': {'key': 'systemServiceType', 'type': 'str'},
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SystemService, self).__init__(**kwargs)
+ self.system_service_type = None
+ self.public_ip_address = None
+ self.version = None
+
+
+class UpdateWorkspaceQuotas(msrest.serialization.Model):
+ """The properties for update Quota response.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :param limit: The maximum permitted quota of the resource.
+ :type limit: long
+ :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit
+ :param status: Status of update workspace quota. Possible values include: "Undefined",
+ "Success", "Failure", "InvalidQuotaBelowClusterMinimum",
+ "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku",
+ "OperationNotEnabledForRegion".
+ :type status: str or ~azure_machine_learning_workspaces.models.Status
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ limit: Optional[int] = None,
+ status: Optional[Union[str, "Status"]] = None,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotas, self).__init__(**kwargs)
+ self.id = None
+ self.type = None
+ self.limit = limit
+ self.unit = None
+ self.status = status
+
+
+class UpdateWorkspaceQuotasResult(msrest.serialization.Model):
+ """The result of update workspace quota.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The list of workspace quota update results.
+    :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas]
+    :ivar next_link: The URI to fetch the next page of workspace quota update results. Call
+     ListNext() with this to fetch the next page of workspace quota update results.
+ :vartype next_link: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'next_link': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs)
+ self.value = None
+ self.next_link = None
+
+
+class Usage(msrest.serialization.Model):
+ """Describes AML Resource Usage.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar aml_workspace_location: Region of the AML workspace in the id.
+ :vartype aml_workspace_location: str
+ :ivar type: Specifies the resource type.
+ :vartype type: str
+ :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count".
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit
+ :ivar current_value: The current usage of the resource.
+ :vartype current_value: long
+ :ivar limit: The maximum permitted usage of the resource.
+ :vartype limit: long
+ :ivar name: The name of the type of usage.
+ :vartype name: ~azure_machine_learning_workspaces.models.UsageName
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'aml_workspace_location': {'readonly': True},
+ 'type': {'readonly': True},
+ 'unit': {'readonly': True},
+ 'current_value': {'readonly': True},
+ 'limit': {'readonly': True},
+ 'name': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'unit': {'key': 'unit', 'type': 'str'},
+ 'current_value': {'key': 'currentValue', 'type': 'long'},
+ 'limit': {'key': 'limit', 'type': 'long'},
+ 'name': {'key': 'name', 'type': 'UsageName'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Usage, self).__init__(**kwargs)
+ self.id = None
+ self.aml_workspace_location = None
+ self.type = None
+ self.unit = None
+ self.current_value = None
+ self.limit = None
+ self.name = None
+
+
+class UsageName(msrest.serialization.Model):
+ """The Usage Names.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar value: The name of the resource.
+ :vartype value: str
+ :ivar localized_value: The localized name of the resource.
+ :vartype localized_value: str
+ """
+
+ _validation = {
+ 'value': {'readonly': True},
+ 'localized_value': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': 'str'},
+ 'localized_value': {'key': 'localizedValue', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UsageName, self).__init__(**kwargs)
+ self.value = None
+ self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+ """Settings for user account that gets created on each on the nodes of a compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param admin_user_name: Required. Name of the administrator user account which can be used to
+ SSH to nodes.
+ :type admin_user_name: str
+ :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+ :type admin_user_ssh_public_key: str
+ :param admin_user_password: Password of the administrator user account.
+ :type admin_user_password: str
+ """
+
+ _validation = {
+ 'admin_user_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+ 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+ 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ admin_user_name: str,
+ admin_user_ssh_public_key: Optional[str] = None,
+ admin_user_password: Optional[str] = None,
+ **kwargs
+ ):
+ super(UserAccountCredentials, self).__init__(**kwargs)
+ self.admin_user_name = admin_user_name
+ self.admin_user_ssh_public_key = admin_user_ssh_public_key
+ self.admin_user_password = admin_user_password
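+
+# Illustrative usage sketch (not generated code): admin_user_name is required; an SSH public key
+# is generally preferable to a password. Values are placeholders.
+#
+#   creds = UserAccountCredentials(
+#       admin_user_name="azureuser",
+#       admin_user_ssh_public_key="ssh-rsa AAAA... user@host",
+#   )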
+
+
+class UserAssignedIdentity(msrest.serialization.Model):
+ """User Assigned Identity.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar principal_id: The principal ID of the user assigned identity.
+ :vartype principal_id: str
+ :ivar tenant_id: The tenant ID of the user assigned identity.
+ :vartype tenant_id: str
+    :ivar client_id: The client ID (also known as the app ID) of the user assigned identity.
+ :vartype client_id: str
+ """
+
+ _validation = {
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ 'client_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'principal_id': {'key': 'principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'tenantId', 'type': 'str'},
+ 'client_id': {'key': 'clientId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(UserAssignedIdentity, self).__init__(**kwargs)
+ self.principal_id = None
+ self.tenant_id = None
+ self.client_id = None
+
+
+class VirtualMachine(Compute):
+ """A Machine Learning compute based on Azure Virtual Machines.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute. Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+    :ivar provisioning_state: The provisioning state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The time at which the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The time at which the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicates whether the compute was provisioned by the user and
+     brought from outside if true, or provisioned by the machine learning service if false.
+    :vartype is_attached_compute: bool
+    :param disable_local_auth: Opt out of local authentication and ensure customers use only MSI
+     and AAD for authentication.
+    :type disable_local_auth: bool
+ :param properties:
+ :type properties: ~azure_machine_learning_workspaces.models.VirtualMachineProperties
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ 'provisioning_state': {'readonly': True},
+ 'created_on': {'readonly': True},
+ 'modified_on': {'readonly': True},
+ 'provisioning_errors': {'readonly': True},
+ 'is_attached_compute': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'compute_location': {'key': 'computeLocation', 'type': 'str'},
+ 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+ 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+ 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+ 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'},
+ 'properties': {'key': 'properties', 'type': 'VirtualMachineProperties'},
+ }
+
+ def __init__(
+ self,
+ *,
+ compute_location: Optional[str] = None,
+ description: Optional[str] = None,
+ resource_id: Optional[str] = None,
+ disable_local_auth: Optional[bool] = None,
+ properties: Optional["VirtualMachineProperties"] = None,
+ **kwargs
+ ):
+ super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.properties = properties
+
+
+class VirtualMachineImage(msrest.serialization.Model):
+ """Virtual Machine image for Windows AML Compute.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param id: Required. Virtual Machine image path.
+ :type id: str
+ """
+
+ _validation = {
+ 'id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ id: str,
+ **kwargs
+ ):
+ super(VirtualMachineImage, self).__init__(**kwargs)
+ self.id = id
+
+
+class VirtualMachineProperties(msrest.serialization.Model):
+ """VirtualMachineProperties.
+
+ :param virtual_machine_size: Virtual Machine size.
+ :type virtual_machine_size: str
+ :param ssh_port: Port open for ssh connections.
+ :type ssh_port: int
+ :param address: Public IP address of the virtual machine.
+ :type address: str
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ :param is_notebook_instance_compute: Indicates whether this compute will be used for running
+ notebooks.
+ :type is_notebook_instance_compute: bool
+ """
+
+ _attribute_map = {
+ 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'},
+ 'ssh_port': {'key': 'sshPort', 'type': 'int'},
+ 'address': {'key': 'address', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ virtual_machine_size: Optional[str] = None,
+ ssh_port: Optional[int] = None,
+ address: Optional[str] = None,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ is_notebook_instance_compute: Optional[bool] = None,
+ **kwargs
+ ):
+ super(VirtualMachineProperties, self).__init__(**kwargs)
+ self.virtual_machine_size = virtual_machine_size
+ self.ssh_port = ssh_port
+ self.address = address
+ self.administrator_account = administrator_account
+ self.is_notebook_instance_compute = is_notebook_instance_compute
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+ """Secrets related to a Machine Learning compute based on AKS.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics", "SynapseSpark".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account:
+ ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+ }
+
+ def __init__(
+ self,
+ *,
+ administrator_account: Optional["VirtualMachineSshCredentials"] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSecrets, self).__init__(**kwargs)
+ self.compute_type = 'VirtualMachine' # type: str
+ self.administrator_account = administrator_account
+
+
+class VirtualMachineSize(msrest.serialization.Model):
+ """Describes the properties of a VM size.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The name of the virtual machine size.
+ :vartype name: str
+ :ivar family: The family name of the virtual machine size.
+ :vartype family: str
+ :ivar v_cp_us: The number of vCPUs supported by the virtual machine size.
+ :vartype v_cp_us: int
+    :ivar gpus: The number of GPUs supported by the virtual machine size.
+ :vartype gpus: int
+ :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size.
+ :vartype os_vhd_size_mb: int
+ :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine
+ size.
+ :vartype max_resource_volume_mb: int
+ :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size.
+ :vartype memory_gb: float
+ :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs.
+ :vartype low_priority_capable: bool
+ :ivar premium_io: Specifies if the virtual machine size supports premium IO.
+ :vartype premium_io: bool
+ :param estimated_vm_prices: The estimated price information for using a VM.
+ :type estimated_vm_prices: ~azure_machine_learning_workspaces.models.EstimatedVmPrices
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'family': {'readonly': True},
+ 'v_cp_us': {'readonly': True},
+ 'gpus': {'readonly': True},
+ 'os_vhd_size_mb': {'readonly': True},
+ 'max_resource_volume_mb': {'readonly': True},
+ 'memory_gb': {'readonly': True},
+ 'low_priority_capable': {'readonly': True},
+ 'premium_io': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'family': {'key': 'family', 'type': 'str'},
+ 'v_cp_us': {'key': 'vCPUs', 'type': 'int'},
+ 'gpus': {'key': 'gpus', 'type': 'int'},
+ 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'},
+ 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'},
+ 'memory_gb': {'key': 'memoryGB', 'type': 'float'},
+ 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'},
+ 'premium_io': {'key': 'premiumIO', 'type': 'bool'},
+ 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVmPrices'},
+ }
+
+ def __init__(
+ self,
+ *,
+ estimated_vm_prices: Optional["EstimatedVmPrices"] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSize, self).__init__(**kwargs)
+ self.name = None
+ self.family = None
+ self.v_cp_us = None
+ self.gpus = None
+ self.os_vhd_size_mb = None
+ self.max_resource_volume_mb = None
+ self.memory_gb = None
+ self.low_priority_capable = None
+ self.premium_io = None
+ self.estimated_vm_prices = estimated_vm_prices
+
+
+class VirtualMachineSizeListResult(msrest.serialization.Model):
+ """The List Virtual Machine size operation response.
+
+ :param aml_compute: The list of virtual machine sizes supported by AmlCompute.
+ :type aml_compute: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+ """
+
+ _attribute_map = {
+ 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ aml_compute: Optional[List["VirtualMachineSize"]] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+ self.aml_compute = aml_compute
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+ """Admin credentials for virtual machine.
+
+ :param username: Username of admin account.
+ :type username: str
+ :param password: Password of admin account.
+ :type password: str
+ :param public_key_data: Public key data.
+ :type public_key_data: str
+ :param private_key_data: Private key data.
+ :type private_key_data: str
+ """
+
+ _attribute_map = {
+ 'username': {'key': 'username', 'type': 'str'},
+ 'password': {'key': 'password', 'type': 'str'},
+ 'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+ 'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ username: Optional[str] = None,
+ password: Optional[str] = None,
+ public_key_data: Optional[str] = None,
+ private_key_data: Optional[str] = None,
+ **kwargs
+ ):
+ super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+ self.username = username
+ self.password = password
+ self.public_key_data = public_key_data
+ self.private_key_data = private_key_data
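+
+# Illustrative usage sketch (not generated code): describing an existing VM to attach as compute,
+# combining the two models above; the address and credentials are placeholders.
+#
+#   vm_props = VirtualMachineProperties(
+#       ssh_port=22,
+#       address="<public-ip-or-fqdn>",
+#       administrator_account=VirtualMachineSshCredentials(
+#           username="azureuser",
+#           private_key_data="<pem-encoded-private-key>",
+#       ),
+#   )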
+
+
+class Workspace(Resource):
+ """An object that represents a machine learning workspace.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: Specifies the resource ID.
+ :vartype id: str
+ :ivar name: Specifies the name of the resource.
+ :vartype name: str
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param location: Specifies the location of the resource.
+ :type location: str
+ :ivar type: Specifies the type of the resource.
+ :vartype type: str
+ :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :ivar system_data: Read only system data.
+ :vartype system_data: ~azure_machine_learning_workspaces.models.SystemData
+ :ivar workspace_id: The immutable id associated with this workspace.
+ :vartype workspace_id: str
+ :param description: The description of this workspace.
+ :type description: str
+    :param friendly_name: The friendly name for this workspace. This name is mutable.
+ :type friendly_name: str
+ :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+    :param discovery_url: URL for the discovery service to identify regional endpoints for machine
+ learning experimentation services.
+ :type discovery_url: str
+    :ivar provisioning_state: The current deployment state of the workspace resource. The
+     provisioningState indicates the state of resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param encryption: The encryption settings of Azure ML workspace.
+ :type encryption: ~azure_machine_learning_workspaces.models.EncryptionProperty
+ :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+ collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by
+     the workspace RP in the customer subscription if the workspace is a CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+ access when behind VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ :ivar notebook_info: The notebook info of Azure ML workspace.
+ :vartype notebook_info: ~azure_machine_learning_workspaces.models.NotebookResourceInfo
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ :ivar tenant_id: The tenant id associated with this workspace.
+ :vartype tenant_id: str
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'system_data': {'readonly': True},
+ 'workspace_id': {'readonly': True},
+ 'provisioning_state': {'readonly': True},
+ 'service_provisioned_resource_group': {'readonly': True},
+ 'private_link_count': {'readonly': True},
+ 'private_endpoint_connections': {'readonly': True},
+ 'notebook_info': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'system_data': {'key': 'systemData', 'type': 'SystemData'},
+ 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'key_vault': {'key': 'properties.keyVault', 'type': 'str'},
+ 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'},
+ 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'},
+ 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'},
+ 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'},
+ 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
+ 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'},
+ 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'},
+ 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'},
+ 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'},
+ 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'},
+ 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'},
+ 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'},
+ 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+ 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+ 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ identity: Optional["Identity"] = None,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ description: Optional[str] = None,
+ friendly_name: Optional[str] = None,
+ key_vault: Optional[str] = None,
+ application_insights: Optional[str] = None,
+ container_registry: Optional[str] = None,
+ storage_account: Optional[str] = None,
+ discovery_url: Optional[str] = None,
+ encryption: Optional["EncryptionProperty"] = None,
+ hbi_workspace: Optional[bool] = False,
+ image_build_compute: Optional[str] = None,
+ allow_public_access_when_behind_vnet: Optional[bool] = False,
+ shared_private_link_resources: Optional[List["SharedPrivateLinkResource"]] = None,
+ service_managed_resources_settings: Optional["ServiceManagedResourcesSettings"] = None,
+ primary_user_assigned_identity: Optional[str] = None,
+ **kwargs
+ ):
+ super(Workspace, self).__init__(identity=identity, location=location, tags=tags, sku=sku, **kwargs)
+ self.workspace_id = None
+ self.description = description
+ self.friendly_name = friendly_name
+ self.key_vault = key_vault
+ self.application_insights = application_insights
+ self.container_registry = container_registry
+ self.storage_account = storage_account
+ self.discovery_url = discovery_url
+ self.provisioning_state = None
+ self.encryption = encryption
+ self.hbi_workspace = hbi_workspace
+ self.service_provisioned_resource_group = None
+ self.private_link_count = None
+ self.image_build_compute = image_build_compute
+ self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet
+ self.private_endpoint_connections = None
+ self.shared_private_link_resources = shared_private_link_resources
+ self.notebook_info = None
+ self.service_managed_resources_settings = service_managed_resources_settings
+ self.primary_user_assigned_identity = primary_user_assigned_identity
+ self.tenant_id = None
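+
+# Illustrative usage sketch (not generated code): a minimal workspace payload; the dependent
+# resource ARM ids are placeholders and, as noted above, cannot be changed after creation.
+#
+#   ws = Workspace(
+#       location="eastus",
+#       friendly_name="My workspace",
+#       key_vault="<key-vault-arm-id>",
+#       application_insights="<app-insights-arm-id>",
+#       storage_account="<storage-account-arm-id>",
+#       hbi_workspace=False,
+#   )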
+
+
+class WorkspaceConnection(msrest.serialization.Model):
+ """Workspace connection.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar id: ResourceId of the workspace connection.
+ :vartype id: str
+ :ivar name: Friendly name of the workspace connection.
+ :vartype name: str
+ :ivar type: Resource type of workspace connection.
+ :vartype type: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+    :param value_format: Format of the workspace connection value. Possible values include:
+ "JSON".
+ :type value_format: str or ~azure_machine_learning_workspaces.models.ValueFormat
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ 'value_format': {'key': 'properties.valueFormat', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ category: Optional[str] = None,
+ target: Optional[str] = None,
+ auth_type: Optional[str] = None,
+ value: Optional[str] = None,
+ value_format: Optional[Union[str, "ValueFormat"]] = None,
+ **kwargs
+ ):
+ super(WorkspaceConnection, self).__init__(**kwargs)
+ self.id = None
+ self.name = None
+ self.type = None
+ self.category = category
+ self.target = target
+ self.auth_type = auth_type
+ self.value = value
+ self.value_format = value_format
+
+
+class WorkspaceConnectionDto(msrest.serialization.Model):
+ """object used for creating workspace connection.
+
+ :param name: Friendly name of the workspace connection.
+ :type name: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param auth_type: Authorization type of the workspace connection.
+ :type auth_type: str
+ :param value: Value details of the workspace connection.
+ :type value: str
+    :param value_format: Format of the workspace connection value. Possible values include:
+ "JSON".
+ :type value_format: str or ~azure_machine_learning_workspaces.models.ValueFormat
+ """
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'category': {'key': 'properties.category', 'type': 'str'},
+ 'target': {'key': 'properties.target', 'type': 'str'},
+ 'auth_type': {'key': 'properties.authType', 'type': 'str'},
+ 'value': {'key': 'properties.value', 'type': 'str'},
+ 'value_format': {'key': 'properties.valueFormat', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ name: Optional[str] = None,
+ category: Optional[str] = None,
+ target: Optional[str] = None,
+ auth_type: Optional[str] = None,
+ value: Optional[str] = None,
+ value_format: Optional[Union[str, "ValueFormat"]] = None,
+ **kwargs
+ ):
+ super(WorkspaceConnectionDto, self).__init__(**kwargs)
+ self.name = name
+ self.category = category
+ self.target = target
+ self.auth_type = auth_type
+ self.value = value
+ self.value_format = value_format
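+
+# Illustrative usage sketch (not generated code): creating a workspace connection payload; the
+# category, target, auth_type, and value shown are placeholders, with value_format indicating how
+# the value string should be parsed.
+#
+#   dto = WorkspaceConnectionDto(
+#       name="my-connection",
+#       category="<category>",
+#       target="<target-url-or-resource>",
+#       auth_type="<auth-type>",
+#       value='{"token": "<secret>"}',
+#       value_format="JSON",
+#   )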
+
+
+class WorkspaceListResult(msrest.serialization.Model):
+ """The result of a request to list machine learning workspaces.
+
+ :param value: The list of machine learning workspaces. Since this list may be incomplete, the
+ nextLink field should be used to request the next list of machine learning workspaces.
+ :type value: list[~azure_machine_learning_workspaces.models.Workspace]
+ :param next_link: The URI that can be used to request the next list of machine learning
+ workspaces.
+ :type next_link: str
+ """
+
+ _attribute_map = {
+ 'value': {'key': 'value', 'type': '[Workspace]'},
+ 'next_link': {'key': 'nextLink', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ value: Optional[List["Workspace"]] = None,
+ next_link: Optional[str] = None,
+ **kwargs
+ ):
+ super(WorkspaceListResult, self).__init__(**kwargs)
+ self.value = value
+ self.next_link = next_link
+
+
+class WorkspaceSku(msrest.serialization.Model):
+ """Describes Workspace Sku details and features.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar locations: The set of locations in which the SKU is available. These will be supported
+     and registered Azure geo regions (e.g. West US, East US, Southeast Asia).
+ :vartype locations: list[str]
+ :ivar location_info: A list of locations and availability zones in those locations where the
+ SKU is available.
+ :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo]
+ :ivar tier: Sku Tier like Basic or Enterprise.
+ :vartype tier: str
+ :ivar resource_type:
+ :vartype resource_type: str
+ :ivar name:
+ :vartype name: str
+ :ivar capabilities: List of features/user capabilities associated with the sku.
+ :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability]
+    :param restrictions: The restrictions because of which the SKU cannot be used. This is empty if
+ there are no restrictions.
+ :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction]
+ """
+
+ _validation = {
+ 'locations': {'readonly': True},
+ 'location_info': {'readonly': True},
+ 'tier': {'readonly': True},
+ 'resource_type': {'readonly': True},
+ 'name': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'locations': {'key': 'locations', 'type': '[str]'},
+ 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'},
+ 'tier': {'key': 'tier', 'type': 'str'},
+ 'resource_type': {'key': 'resourceType', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'},
+ 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'},
+ }
+
+ def __init__(
+ self,
+ *,
+ restrictions: Optional[List["Restriction"]] = None,
+ **kwargs
+ ):
+ super(WorkspaceSku, self).__init__(**kwargs)
+ self.locations = None
+ self.location_info = None
+ self.tier = None
+ self.resource_type = None
+ self.name = None
+ self.capabilities = None
+ self.restrictions = restrictions
+
+
+class WorkspaceUpdateParameters(msrest.serialization.Model):
+ """The parameters for updating a machine learning workspace.
+
+ :param tags: A set of tags. The resource tags for the machine learning workspace.
+ :type tags: dict[str, str]
+ :param sku: The sku of the workspace.
+ :type sku: ~azure_machine_learning_workspaces.models.Sku
+ :param identity: The identity of the resource.
+ :type identity: ~azure_machine_learning_workspaces.models.Identity
+ :param description: The description of this workspace.
+ :type description: str
+ :param friendly_name: The friendly name for this workspace.
+ :type friendly_name: str
+ :param image_build_compute: The compute name for image build.
+ :type image_build_compute: str
+ :param service_managed_resources_settings: The service managed resource settings.
+ :type service_managed_resources_settings:
+ ~azure_machine_learning_workspaces.models.ServiceManagedResourcesSettings
+ :param primary_user_assigned_identity: The user assigned identity resource id that represents
+ the workspace identity.
+ :type primary_user_assigned_identity: str
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'identity': {'key': 'identity', 'type': 'Identity'},
+ 'description': {'key': 'properties.description', 'type': 'str'},
+ 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
+ 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'},
+ 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'},
+ 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ identity: Optional["Identity"] = None,
+ description: Optional[str] = None,
+ friendly_name: Optional[str] = None,
+ image_build_compute: Optional[str] = None,
+ service_managed_resources_settings: Optional["ServiceManagedResourcesSettings"] = None,
+ primary_user_assigned_identity: Optional[str] = None,
+ **kwargs
+ ):
+ super(WorkspaceUpdateParameters, self).__init__(**kwargs)
+ self.tags = tags
+ self.sku = sku
+ self.identity = identity
+ self.description = description
+ self.friendly_name = friendly_name
+ self.image_build_compute = image_build_compute
+ self.service_managed_resources_settings = service_managed_resources_settings
+ self.primary_user_assigned_identity = primary_user_assigned_identity
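The models above are plain msrest serialization models, so they can be constructed and inspected directly. A minimal sketch (illustrative only; the import path is inferred from the vendored SDK layout added in this diff, and the field values are placeholders):

```
# Hedged example: build an update payload with the generated model and inspect the
# REST body msrest would produce (serialize() applies the "properties.*" mapping).
from azext_machinelearningservices.vendored_sdks.machinelearningservices import models

update_params = models.WorkspaceUpdateParameters(
    tags={"env": "test"},               # resource tags
    description="new description",      # maps to properties.description
    friendly_name="New friendly name",  # maps to properties.friendlyName
)

print(update_params.serialize())
```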
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py
new file mode 100644
index 00000000000..7dc21ac7c33
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._operations import Operations
+from ._workspaces_operations import WorkspacesOperations
+from ._workspace_features_operations import WorkspaceFeaturesOperations
+from ._usages_operations import UsagesOperations
+from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations
+from ._quotas_operations import QuotasOperations
+from ._machine_learning_compute_operations import MachineLearningComputeOperations
+from ._workspace_operations import WorkspaceOperations
+from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations
+from ._private_link_resources_operations import PrivateLinkResourcesOperations
+from ._machine_learning_service_operations import MachineLearningServiceOperations
+from ._notebooks_operations import NotebooksOperations
+from ._storage_account_operations import StorageAccountOperations
+from ._workspace_connections_operations import WorkspaceConnectionsOperations
+
+__all__ = [
+ 'Operations',
+ 'WorkspacesOperations',
+ 'WorkspaceFeaturesOperations',
+ 'UsagesOperations',
+ 'VirtualMachineSizesOperations',
+ 'QuotasOperations',
+ 'MachineLearningComputeOperations',
+ 'WorkspaceOperations',
+ 'PrivateEndpointConnectionsOperations',
+ 'PrivateLinkResourcesOperations',
+ 'MachineLearningServiceOperations',
+ 'NotebooksOperations',
+ 'StorageAccountOperations',
+ 'WorkspaceConnectionsOperations',
+]
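As the operation-group docstrings in the files below note, these classes are not meant to be instantiated directly; the generated service client creates them and attaches each one as an attribute. A hedged sketch of that wiring (the client class name, its constructor arguments, and the attribute name are assumptions, since the client module itself is not shown in this section):

```
# Illustrative only: "AzureMachineLearningWorkspaces" and the attribute name
# "machine_learning_compute" are assumed, not confirmed by this diff.
from azure.identity import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,
)

client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# Each entry in __all__ above surfaces as an operation group on the client.
for compute in client.machine_learning_compute.list_by_workspace(
    resource_group_name="<resource-group>",
    workspace_name="<workspace-name>",
):
    print(compute.name)
```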
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py
new file mode 100644
index 00000000000..d0371070250
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py
@@ -0,0 +1,1045 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class MachineLearningComputeOperations(object):
+ """MachineLearningComputeOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_workspace(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ skip=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.PaginatedComputeResourcesList"]
+ """Gets computes in specified workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedComputeResourcesList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeResource"
+ """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are
+ not returned - use 'keys' nested resource to get them.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ComputeResource"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeResource"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ComputeResource')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 200:
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def begin_create_or_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ComputeResource"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.ComputeResource"]
+ """Creates or updates compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify
+ that it does not exist yet.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Payload with Machine Learning compute definition.
+ :type parameters: ~azure_machine_learning_workspaces.models.ComputeResource
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ response_headers = {}
+ response = pipeline_response.http_response
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
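+ # Usage sketch (illustrative, not generated code): callers typically reach this
+ # method through the client's compute operation group and block on the returned
+ # LROPoller, e.g.
+ #   poller = compute_ops.begin_create_or_update(
+ #       "<resource-group>", "<workspace-name>", "<compute-name>", compute_resource)
+ #   compute = poller.result()  # waits for the long-running operation to finish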
+
+ def _update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ClusterUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeResource"
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'ClusterUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def begin_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ parameters, # type: "models.ClusterUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.ComputeResource"]
+ """Updates properties of a compute. This call will overwrite a compute if it exists. This is a
+ nonrecoverable operation.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param parameters: Additional parameters for cluster update.
+ :type parameters: ~azure_machine_learning_workspaces.models.ClusterUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('ComputeResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def _delete_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ underlying_resource_action, # type: Union[str, "models.UnderlyingResourceAction"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ if response.status_code == 202:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+ response_headers['Location']=self._deserialize('str', response.headers.get('Location'))
+
+ if cls:
+ return cls(pipeline_response, None, response_headers)
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def begin_delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ underlying_resource_action, # type: Union[str, "models.UnderlyingResourceAction"]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Deletes specified Machine Learning compute.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the
+ underlying compute from the workspace if 'Detach'.
+ :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ underlying_resource_action=underlying_resource_action,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore
+
+ def list_nodes(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.AmlComputeNodesInformation"]
+ """Get the details (e.g IP address, port etc) of all the compute nodes in the compute.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either AmlComputeNodesInformation or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.AmlComputeNodesInformation]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.AmlComputeNodesInformation"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_nodes.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response)
+ list_of_elem = deserialized.nodes
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_nodes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore
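+ # Usage sketch (illustrative only): list_nodes returns an ItemPaged, so node
+ # details are fetched lazily as the caller iterates, e.g.
+ #   for node in compute_ops.list_nodes("<resource-group>", "<workspace-name>", "<compute-name>"):
+ #       print(node)  # each element comes from AmlComputeNodesInformation.nodes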
+
+ def list_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ComputeSecrets"
+ """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc).
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ComputeSecrets, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeSecrets"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ComputeSecrets', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore
+
+ def _start_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._start_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ def begin_start(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Posts a start action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of the Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._start_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore
+
+ def _stop_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._stop_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ def begin_stop(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Posts a stop action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of the Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._stop_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ compute_name=compute_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore
+
+ def restart(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ compute_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Posts a restart action to a compute instance.
+
+ :param resource_group_name: Name of the resource group in which the workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of the Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param compute_name: Name of the Azure Machine Learning compute.
+ :type compute_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.restart.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'computeName': self._serialize.url("compute_name", compute_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore
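All of the begin_* methods in this file return an azure.core LROPoller, and delete additionally lets the caller decide whether the underlying resource is removed or only detached from the workspace. A hedged sketch (how compute_ops is obtained from the generated client is an assumption, as in the earlier example):

```
# Illustrative only: compute_ops stands for the MachineLearningComputeOperations
# instance attached to the generated client.
poller = compute_ops.begin_delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace-name>",
    compute_name="<compute-name>",
    underlying_resource_action="Detach",  # "Delete" would remove the underlying compute
)
poller.wait()  # begin_delete resolves to None, so waiting is enough

# Polling can also be turned off, in which case the poller completes as soon as the
# initial 200/202 response is accepted:
compute_ops.begin_delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace-name>",
    compute_name="<compute-name>",
    underlying_resource_action="Delete",
    polling=False,
).result()
```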
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_service_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_service_operations.py
new file mode 100644
index 00000000000..38e59f5d64a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_service_operations.py
@@ -0,0 +1,444 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class MachineLearningServiceOperations(object):
+ """MachineLearningServiceOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_workspace(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ skip=None, # type: Optional[str]
+ model_id=None, # type: Optional[str]
+ model_name=None, # type: Optional[str]
+ tag=None, # type: Optional[str]
+ tags=None, # type: Optional[str]
+ properties=None, # type: Optional[str]
+ run_id=None, # type: Optional[str]
+ expand=None, # type: Optional[bool]
+ orderby="UpdatedAtDesc", # type: Optional[Union[str, "models.OrderString"]]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.PaginatedServiceList"]
+ """Gets services in specified workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :param model_id: The Model Id.
+ :type model_id: str
+ :param model_name: The Model name.
+ :type model_name: str
+ :param tag: The object tag.
+ :type tag: str
+ :param tags: A set of tags with which to filter the returned services. This is a comma-separated
+ string of tag keys or key=value pairs, for example: tagKey1,tagKey2,tagKey3=value3.
+ :type tags: str
+ :param properties: A set of properties with which to filter the returned services. This is a
+ comma-separated string of property keys and/or key=value pairs, for example:
+ propKey1,propKey2,propKey3=value3.
+ :type properties: str
+ :param run_id: The run ID of the model associated with the service.
+ :type run_id: str
+ :param expand: Set to True to include Model details.
+ :type expand: bool
+ :param orderby: The option to order the response.
+ :type orderby: str or ~azure_machine_learning_workspaces.models.OrderString
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedServiceList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedServiceList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedServiceList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+ if model_id is not None:
+ query_parameters['modelId'] = self._serialize.query("model_id", model_id, 'str')
+ if model_name is not None:
+ query_parameters['modelName'] = self._serialize.query("model_name", model_name, 'str')
+ if tag is not None:
+ query_parameters['tag'] = self._serialize.query("tag", tag, 'str')
+ if tags is not None:
+ query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
+ if properties is not None:
+ query_parameters['properties'] = self._serialize.query("properties", properties, 'str')
+ if run_id is not None:
+ query_parameters['runId'] = self._serialize.query("run_id", run_id, 'str')
+ if expand is not None:
+ query_parameters['expand'] = self._serialize.query("expand", expand, 'bool')
+ if orderby is not None:
+ query_parameters['orderby'] = self._serialize.query("orderby", orderby, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedServiceList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services'} # type: ignore
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ service_name, # type: str
+ expand=False, # type: Optional[bool]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ServiceResource"
+ """Get a Service by name.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param service_name: Name of the Azure Machine Learning service.
+ :type service_name: str
+ :param expand: Set to True to include Model details.
+ :type expand: bool
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ServiceResource, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ServiceResource
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ServiceResource"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if expand is not None:
+ query_parameters['expand'] = self._serialize.query("expand", expand, 'bool')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+ def delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ service_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Delete a specific Service..
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param service_name: Name of the Azure Machine Learning service.
+ :type service_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ service_name, # type: str
+ properties, # type: "models.CreateServiceRequest"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["models.ServiceResource"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ServiceResource"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(properties, 'CreateServiceRequest')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ response_headers = {}
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+ if response.status_code == 201:
+ response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
+
+ if cls:
+ return cls(pipeline_response, deserialized, response_headers)
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
+
+ def begin_create_or_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ service_name, # type: str
+ properties, # type: "models.CreateServiceRequest"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.ServiceResource"]
+ """Creates or updates service. This call will update a service if it exists. This is a
+ nonrecoverable operation. If your intent is to create a new service, do a GET first to verify
+ that it does not exist yet.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param service_name: Name of the Azure Machine Learning service.
+ :type service_name: str
+ :param properties: The payload that is used to create or update the Service.
+ :type properties: ~azure_machine_learning_workspaces.models.CreateServiceRequest
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either ServiceResource or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ServiceResource]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ServiceResource"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ service_name=service_name,
+ properties=properties,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('ServiceResource', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'serviceName': self._serialize.url("service_name", service_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/services/{serviceName}'} # type: ignore
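The operations above follow the standard generated-SDK patterns: `list_by_workspace` returns an `ItemPaged` iterator that follows continuation tokens for you, `get` and `delete` are plain synchronous calls, and `begin_create_or_update` returns an `LROPoller`. Below is a minimal, hedged usage sketch; the client class `AzureMachineLearningWorkspaces`, its import path, its constructor arguments, and the `machine_learning_service` attribute name are assumptions made for illustration and should be checked against the vendored package's exports.

```python
# Hedged usage sketch for the generated service operations. The client class name,
# import path, constructor signature, and operation-group attribute are assumptions.
from azure.identity import DefaultAzureCredential
from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,  # assumed export name of the vendored client
)

client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-1111-2222-3333-444444444444",
)

# Iterate all services in a workspace, filtering server-side by tags; paging is
# handled by the returned ItemPaged, so no manual $skip handling is needed.
for service in client.machine_learning_service.list_by_workspace(
    resource_group_name="workspace-1234",
    workspace_name="testworkspace",
    tags="tagKey1,tagKey3=value3",
    expand=True,
):
    print(service.id)  # ServiceResource is assumed to expose the usual ARM id/name fields

# Fetch a single service, including model details, then delete it.
svc = client.machine_learning_service.get(
    "workspace-1234", "testworkspace", "myservice", expand=True
)
client.machine_learning_service.delete("workspace-1234", "testworkspace", "myservice")
```

For `begin_create_or_update`, call `.result()` on the returned `LROPoller` to block until the `ServiceResource` is available; the request body must be a `models.CreateServiceRequest`, whose fields are defined in the vendored models package.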
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebooks_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebooks_operations.py
new file mode 100644
index 00000000000..755a9420665
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebooks_operations.py
@@ -0,0 +1,226 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class NotebooksOperations(object):
+ """NotebooksOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def _prepare_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["models.NotebookResourceInfo"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResourceInfo"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._prepare_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ def begin_prepare(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.NotebookResourceInfo"]
+ """prepare.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either NotebookResourceInfo or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResourceInfo"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._prepare_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('NotebookResourceInfo', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore
+
+ def list_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ListNotebookKeysResult"
+ """list_keys.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListNotebookKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListNotebookKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListNotebookKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys'} # type: ignore
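`begin_prepare` is a long-running POST with no request body, and `list_keys` is a simple POST. A brief sketch of calling both through the same assumed client follows; the `notebooks` attribute name is an assumption.

```python
# Hedged sketch; `client` is the AzureMachineLearningWorkspaces instance constructed in the
# earlier example, and the `notebooks` attribute name is an assumption for illustration.
poller = client.notebooks.begin_prepare(
    resource_group_name="workspace-1234",
    workspace_name="testworkspace",
)
notebook_info = poller.result()  # NotebookResourceInfo once the LRO completes

keys = client.notebooks.list_keys(
    resource_group_name="workspace-1234",
    workspace_name="testworkspace",
)  # returns a ListNotebookKeysResult
```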
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py
new file mode 100644
index 00000000000..e12378529a9
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operations.py
@@ -0,0 +1,110 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class Operations(object):
+ """Operations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.OperationListResult"]
+ """Lists all of the available Azure Machine Learning Workspaces REST API operations.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either OperationListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.OperationListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('OperationListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore
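`Operations.list` is the standard provider-operations listing; note that `extract_data` returns `None` for the next link, so only a single page is ever requested. A short sketch, assuming the group is exposed as `client.operations`:

```python
# Hedged sketch; `client` is the assumed AzureMachineLearningWorkspaces instance from the
# first example, and `operations` is an assumed attribute name.
for operation in client.operations.list():
    # Each item is deserialized from OperationListResult.value in the vendored models package.
    print(operation.name)
```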
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py
new file mode 100644
index 00000000000..9106c784b8f
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connections_operations.py
@@ -0,0 +1,245 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateEndpointConnectionsOperations(object):
+ """PrivateEndpointConnectionsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ private_endpoint_connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.PrivateEndpointConnection"
+ """Gets the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ def put(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ private_endpoint_connection_name, # type: str
+ properties, # type: "models.PrivateEndpointConnection"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.PrivateEndpointConnection"
+ """Update the state of specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :param properties: The private endpoint connection properties.
+ :type properties: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateEndpointConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.put.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(properties, 'PrivateEndpointConnection')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
+
+ def delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ private_endpoint_connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Deletes the specified private endpoint connection associated with the workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection associated
+ with the workspace.
+ :type private_endpoint_connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore
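`get`, `put`, and `delete` in this group are plain synchronous calls keyed by the connection name. Because `put` takes a full `models.PrivateEndpointConnection` body, the sketch below only exercises the read and delete paths; the `private_endpoint_connections` attribute name is again an assumption.

```python
# Hedged sketch; `client` and the attribute name are assumptions as in the earlier examples.
connection = client.private_endpoint_connections.get(
    resource_group_name="workspace-1234",
    workspace_name="testworkspace",
    private_endpoint_connection_name="myconnection",
)
print(connection.id)  # PrivateEndpointConnection is assumed to expose the usual ARM id field

client.private_endpoint_connections.delete(
    resource_group_name="workspace-1234",
    workspace_name="testworkspace",
    private_endpoint_connection_name="myconnection",
)
```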
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py
new file mode 100644
index 00000000000..d76193be256
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resources_operations.py
@@ -0,0 +1,104 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class PrivateLinkResourcesOperations(object):
+ """PrivateLinkResourcesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_by_workspace(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.PrivateLinkResourceListResult"
+ """Gets the private link resources that need to be created for a workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: PrivateLinkResourceListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_by_workspace.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore
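`list_by_workspace` here returns a single `PrivateLinkResourceListResult` rather than a pager. The sketch below assumes the group is exposed as `client.private_link_resources` and that the result's `value` attribute holds the resource list.

```python
# Hedged sketch; client construction and attribute names are assumptions as above.
result = client.private_link_resources.list_by_workspace(
    resource_group_name="workspace-1234",
    workspace_name="testworkspace",
)
for link_resource in result.value or []:  # `value` is assumed to carry the resources
    print(link_resource.id)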
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py
new file mode 100644
index 00000000000..ccb3904ccc0
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quotas_operations.py
@@ -0,0 +1,182 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class QuotasOperations(object):
+ """QuotasOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def update(
+ self,
+ location, # type: str
+ parameters, # type: "models.QuotaUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.UpdateWorkspaceQuotasResult"
+ """Update quota for each VM family in workspace.
+
+ :param location: The location for which the quota update is requested.
+ :type location: str
+ :param parameters: Quota update parameters.
+ :type parameters: ~azure_machine_learning_workspaces.models.QuotaUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: UpdateWorkspaceQuotasResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.UpdateWorkspaceQuotasResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'QuotaUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListWorkspaceQuotas"]
+ """Gets the currently assigned Workspace Quotas based on VMFamily.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceQuotas"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas'} # type: ignore
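`Quotas.list` is a pager scoped to a location, while `update` posts a `models.QuotaUpdateParameters` body. The sketch below shows only the list call, since the update payload's fields live in the vendored models; the `quotas` attribute name is assumed.

```python
# Hedged sketch; `client` and the `quotas` attribute name are assumptions as in earlier examples.
for workspace_quota in client.quotas.list(location="eastus"):
    # Items come from ListWorkspaceQuotas.value; printing the raw model keeps the sketch generic.
    print(workspace_quota)
```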
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_storage_account_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_storage_account_operations.py
new file mode 100644
index 00000000000..36d42231861
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_storage_account_operations.py
@@ -0,0 +1,105 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class StorageAccountOperations(object):
+ """StorageAccountOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ListStorageAccountKeysResult"
+ """list_keys.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListStorageAccountKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListStorageAccountKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListStorageAccountKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListStorageAccountKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys'} # type: ignore
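A hedged sketch of calling the storage-account key listing above; `client.storage_account` is an assumed attribute name for this operation group.

```python
# Hedged sketch: `client.storage_account` is an assumed attribute for the
# StorageAccountOperations group above.
def get_attached_storage_keys(client, resource_group_name, workspace_name):
    # Returns a deserialized ListStorageAccountKeysResult model.
    return client.storage_account.list_keys(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
    )
```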
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py
new file mode 100644
index 00000000000..dbbd5ba9c67
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usages_operations.py
@@ -0,0 +1,118 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class UsagesOperations(object):
+ """UsagesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListUsagesResult"]
+ """Gets the current usage information as well as limits for AML resources for given subscription
+ and location.
+
+ :param location: The location for which resource usage is queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListUsagesResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListUsagesResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListUsagesResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore
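Because the usages operation pages through results via `nextLink`, a caller can surface the page boundaries with `ItemPaged.by_page()`. This sketch assumes the vendored client exposes the group as `client.usages`.

```python
# Hedged sketch: `client.usages` is an assumed attribute for UsagesOperations.
def print_usage_by_page(client, location):
    pager = client.usages.list(location=location)
    # by_page() yields one iterable per service page, which makes the
    # nextLink-driven paging in prepare_request()/get_next() visible to callers.
    for page_number, page in enumerate(pager.by_page(), start=1):
        print(f"--- page {page_number} ---")
        for usage in page:
            print(usage)
```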
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py
new file mode 100644
index 00000000000..b38329d543c
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_sizes_operations.py
@@ -0,0 +1,100 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class VirtualMachineSizesOperations(object):
+ """VirtualMachineSizesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ location, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.VirtualMachineSizeListResult"
+ """Returns supported VM Sizes in a location.
+
+ :param location: The location for which virtual machine sizes are queried.
+ :type location: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: VirtualMachineSizeListResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineSizeListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'),
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore
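Unlike the paged operations, the VM-size listing above is a single call returning one model. A hedged sketch, with `client.virtual_machine_sizes` assumed as the attribute name:

```python
# Hedged sketch: `client.virtual_machine_sizes` is an assumed attribute name
# for the VirtualMachineSizesOperations group above.
def list_vm_sizes(client, location):
    # A plain (non-paged) call returning a VirtualMachineSizeListResult model.
    return client.virtual_machine_sizes.list(location=location)
```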
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py
new file mode 100644
index 00000000000..0d26c55985d
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connections_operations.py
@@ -0,0 +1,329 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceConnectionsOperations(object):
+ """WorkspaceConnectionsOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ target=None, # type: Optional[str]
+ category=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.PaginatedWorkspaceConnectionsList"]
+ """List all connections under a AML workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param target: Target of the workspace connection.
+ :type target: str
+ :param category: Category of the workspace connection.
+ :type category: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedWorkspaceConnectionsList"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if target is not None:
+ query_parameters['target'] = self._serialize.query("target", target, 'str')
+ if category is not None:
+ query_parameters['category'] = self._serialize.query("category", category, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore
+
+ def create(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ parameters, # type: "models.WorkspaceConnectionDto"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.WorkspaceConnection"
+ """Add a new workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :param parameters: The object for creating or updating a new workspace connection.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceConnectionDto
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.create.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceConnectionDto')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.WorkspaceConnection"
+ """Get the detail of a workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceConnection, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('WorkspaceConnection', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
+
+ def delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ connection_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ """Delete a workspace connection.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param connection_name: Friendly name of the workspace connection.
+ :type connection_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None, or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.delete.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ 'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore
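The connection operations above form a simple create/get/delete surface. The following hedged round-trip sketch assumes `client.workspace_connections` as the attribute name and expects the caller to build the `WorkspaceConnectionDto` payload (only the model name appears in this diff).

```python
# Hedged sketch of a connection round trip against the operations defined above.
# `client.workspace_connections` is an assumed attribute name; `parameters` is
# expected to be a WorkspaceConnectionDto instance built by the caller.
def connection_round_trip(client, resource_group_name, workspace_name, connection_name, parameters):
    created = client.workspace_connections.create(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        connection_name=connection_name,
        parameters=parameters,
    )
    fetched = client.workspace_connections.get(resource_group_name, workspace_name, connection_name)
    client.workspace_connections.delete(resource_group_name, workspace_name, connection_name)
    return created, fetched
```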
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py
new file mode 100644
index 00000000000..138f1a93d4a
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_features_operations.py
@@ -0,0 +1,122 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceFeaturesOperations(object):
+ """WorkspaceFeaturesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.ListAmlUserFeatureResult"]
+ """Lists all enabled features for a workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListAmlUserFeatureResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore
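A short hedged sketch for the feature listing above; `client.workspace_features` is an assumed attribute name for this group.

```python
# Hedged sketch: `client.workspace_features` is an assumed attribute for the
# WorkspaceFeaturesOperations group above.
def list_enabled_features(client, resource_group_name, workspace_name):
    # Materializes the lazy ItemPaged of feature entries (per ListAmlUserFeatureResult).
    return list(client.workspace_features.list(resource_group_name, workspace_name))
```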
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_operations.py
new file mode 100644
index 00000000000..6e8687ea454
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_operations.py
@@ -0,0 +1,114 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.mgmt.core.exceptions import ARMErrorFormat
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceOperations(object):
+ """WorkspaceOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def list_skus(
+ self,
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.SkuListResult"]
+ """Lists all skus with associated features.
+
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either SkuListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.SkuListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.SkuListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_skus.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('SkuListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore
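Finally, a hedged sketch for the subscription-wide SKU listing above; `client.workspace` is an assumed attribute name for the WorkspaceOperations group.

```python
# Hedged sketch: `client.workspace` is an assumed attribute for the
# WorkspaceOperations group that exposes list_skus above.
def print_workspace_skus(client):
    for sku in client.workspace.list_skus():
        print(sku)
```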
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py
new file mode 100644
index 00000000000..7d9c4f103d9
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspaces_operations.py
@@ -0,0 +1,802 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import TYPE_CHECKING
+import warnings
+
+from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.paging import ItemPaged
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+from azure.core.polling import LROPoller, NoPolling, PollingMethod
+from azure.mgmt.core.exceptions import ARMErrorFormat
+from azure.mgmt.core.polling.arm_polling import ARMPolling
+
+from .. import models
+
+if TYPE_CHECKING:
+ # pylint: disable=unused-import,ungrouped-imports
+ from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
+
+ T = TypeVar('T')
+ ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspacesOperations(object):
+ """WorkspacesOperations operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that
+ instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure_machine_learning_workspaces.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer):
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ def get(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.Workspace"
+ """Gets the properties of the specified machine learning workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.get.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def _create_or_update_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.Workspace"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Optional["models.Workspace"]
+ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self._create_or_update_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'Workspace')
+ body_content_kwargs['content'] = body_content
+ request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 201, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = None
+ if response.status_code == 200:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if response.status_code == 201:
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def begin_create_or_update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.Workspace"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller["models.Workspace"]
+ """Creates or updates a workspace with the specified parameters.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for creating or updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.Workspace
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either Workspace or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.Workspace]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._create_or_update_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ parameters=parameters,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+ return deserialized
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def _delete_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._delete_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202, 204]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def begin_delete(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Deletes a machine learning workspace.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._delete_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
+
+ def update(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ parameters, # type: "models.WorkspaceUpdateParameters"
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.Workspace"
+ """Updates a machine learning workspace with the specified parameters.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :param parameters: The parameters for updating a machine learning workspace.
+ :type parameters: ~azure_machine_learning_workspaces.models.WorkspaceUpdateParameters
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: Workspace, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.Workspace
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ content_type = kwargs.pop("content_type", "application/json")
+ accept = "application/json"
+
+ # Construct URL
+ url = self.update.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ body_content_kwargs = {} # type: Dict[str, Any]
+ body_content = self._serialize.body(parameters, 'WorkspaceUpdateParameters')
+ body_content_kwargs['content'] = body_content
+ request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('Workspace', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore
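+
+    # Illustrative usage sketch, assuming the operations class is exposed as
+    # ``client.workspaces`` and that ``WorkspaceUpdateParameters`` accepts
+    # ``description`` and ``friendly_name`` (assumed field names):
+    #
+    #     params = models.WorkspaceUpdateParameters(
+    #         description="new description", friendly_name="New friendly name")
+    #     workspace = client.workspaces.update("my-rg", "my-workspace", params)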
+
+ def list_by_resource_group(
+ self,
+ resource_group_name, # type: str
+ skip=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.WorkspaceListResult"]
+ """Lists all the available machine learning workspaces under the specified resource group.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_resource_group.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
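+
+    # Illustrative usage sketch, assuming ``client.workspaces`` exposes this
+    # class. The returned ItemPaged follows the service's nextLink for you, so
+    # plain iteration walks every page:
+    #
+    #     for workspace in client.workspaces.list_by_resource_group("my-rg"):
+    #         print(workspace.name)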
+
+ def list_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.ListWorkspaceKeysResult"
+ """Lists all the keys associated with this workspace. This includes keys for the storage account,
+ app insights and password for container registry.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ListWorkspaceKeysResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceKeysResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_keys.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore
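+
+    # Illustrative usage sketch, assuming ``client.workspaces`` exposes this
+    # class:
+    #
+    #     keys = client.workspaces.list_keys("my-rg", "my-workspace")
+    #     # ``keys`` carries the storage account, Application Insights and
+    #     # container registry credentials described in the docstring above.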
+
+ def _resync_keys_initial(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> None
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self._resync_keys_initial.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200, 202]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ _resync_keys_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
+
+ def begin_resync_keys(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> LROPoller[None]
+ """Resync all the keys associated with this workspace. This includes keys for the storage account,
+ app insights and password for container registry.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: True for ARMPolling, False for no polling, or a
+ polling object for personal polling strategy
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+ :return: An instance of LROPoller that returns either None or the result of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[None]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
+ cls = kwargs.pop('cls', None) # type: ClsType[None]
+ lro_delay = kwargs.pop(
+ 'polling_interval',
+ self._config.polling_interval
+ )
+ cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
+ if cont_token is None:
+ raw_result = self._resync_keys_initial(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ cls=lambda x,y,z: x,
+ **kwargs
+ )
+
+ kwargs.pop('error_map', None)
+ kwargs.pop('content_type', None)
+
+ def get_long_running_output(pipeline_response):
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+
+ if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
+ elif polling is False: polling_method = NoPolling()
+ else: polling_method = polling
+ if cont_token:
+ return LROPoller.from_continuation_token(
+ polling_method=polling_method,
+ continuation_token=cont_token,
+ client=self._client,
+ deserialization_callback=get_long_running_output
+ )
+ else:
+ return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+ begin_resync_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore
+
+ def list_by_subscription(
+ self,
+ skip=None, # type: Optional[str]
+ **kwargs # type: Any
+ ):
+ # type: (...) -> Iterable["models.WorkspaceListResult"]
+ """Lists all the available machine learning workspaces under the specified subscription.
+
+ :param skip: Continuation token for pagination.
+ :type skip: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: An iterator like instance of either WorkspaceListResult or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult]
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ def prepare_request(next_link=None):
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ if not next_link:
+ # Construct URL
+ url = self.list_by_subscription.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+ if skip is not None:
+ query_parameters['$skip'] = self._serialize.query("skip", skip, 'str')
+
+ request = self._client.get(url, query_parameters, header_parameters)
+ else:
+ url = next_link
+ query_parameters = {} # type: Dict[str, Any]
+ request = self._client.get(url, query_parameters, header_parameters)
+ return request
+
+ def extract_data(pipeline_response):
+ deserialized = self._deserialize('WorkspaceListResult', pipeline_response)
+ list_of_elem = deserialized.value
+ if cls:
+ list_of_elem = cls(list_of_elem)
+ return deserialized.next_link or None, iter(list_of_elem)
+
+ def get_next(next_link=None):
+ request = prepare_request(next_link)
+
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ return pipeline_response
+
+ return ItemPaged(
+ get_next, extract_data
+ )
+ list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore
+
+ def list_notebook_access_token(
+ self,
+ resource_group_name, # type: str
+ workspace_name, # type: str
+ **kwargs # type: Any
+ ):
+ # type: (...) -> "models.NotebookAccessTokenResult"
+ """return notebook access token and refresh token.
+
+ :param resource_group_name: Name of the resource group in which workspace is located.
+ :type resource_group_name: str
+ :param workspace_name: Name of Azure Machine Learning workspace.
+ :type workspace_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: NotebookAccessTokenResult, or the result of cls(response)
+ :rtype: ~azure_machine_learning_workspaces.models.NotebookAccessTokenResult
+ :raises: ~azure.core.exceptions.HttpResponseError
+ """
+ cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookAccessTokenResult"]
+ error_map = {
+ 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ }
+ error_map.update(kwargs.pop('error_map', {}))
+ api_version = "2021-04-01"
+ accept = "application/json"
+
+ # Construct URL
+ url = self.list_notebook_access_token.metadata['url'] # type: ignore
+ path_format_arguments = {
+ 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+ 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters = {} # type: Dict[str, Any]
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters = {} # type: Dict[str, Any]
+ header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ error = self._deserialize(models.MachineLearningServiceError, response)
+ raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+ deserialized = self._deserialize('NotebookAccessTokenResult', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_notebook_access_token.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken'} # type: ignore
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed
new file mode 100644
index 00000000000..e5aff4f83af
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed
@@ -0,0 +1 @@
+# Marker file for PEP 561.
\ No newline at end of file
diff --git a/src/machinelearningservices/report.md b/src/machinelearningservices/report.md
new file mode 100644
index 00000000000..a643eb2ee41
--- /dev/null
+++ b/src/machinelearningservices/report.md
@@ -0,0 +1,1200 @@
+# Azure CLI Module Creation Report
+
+## EXTENSION
+|CLI Extension|Command Groups|
+|---------|------------|
+|az machinelearningservices|[groups](#CommandGroups)
+
+## GROUPS
+### Command groups in `az machinelearningservices` extension
+|CLI Command Group|Group Swagger name|Commands|
+|---------|------------|--------|
+|az machinelearningservices workspace|Workspaces|[commands](#CommandsInWorkspaces)|
+|az machinelearningservices workspace-feature|WorkspaceFeatures|[commands](#CommandsInWorkspaceFeatures)|
+|az machinelearningservices usage|Usages|[commands](#CommandsInUsages)|
+|az machinelearningservices virtual-machine-size|VirtualMachineSizes|[commands](#CommandsInVirtualMachineSizes)|
+|az machinelearningservices quota|Quotas|[commands](#CommandsInQuotas)|
+|az machinelearningservices machine-learning-compute|MachineLearningCompute|[commands](#CommandsInMachineLearningCompute)|
+|az machinelearningservices workspace|Workspace|[commands](#CommandsInWorkspace)|
+|az machinelearningservices private-endpoint-connection|PrivateEndpointConnections|[commands](#CommandsInPrivateEndpointConnections)|
+|az machinelearningservices private-link-resource|PrivateLinkResources|[commands](#CommandsInPrivateLinkResources)|
+|az machinelearningservices machine-learning-service|MachineLearningService|[commands](#CommandsInMachineLearningService)|
+|az machinelearningservices notebook|Notebooks|[commands](#CommandsInNotebooks)|
+|az machinelearningservices storage-account|StorageAccount|[commands](#CommandsInStorageAccount)|
+|az machinelearningservices workspace-connection|WorkspaceConnections|[commands](#CommandsInWorkspaceConnections)|
+
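+Each command group documented below can also be explored directly from the CLI; for example (assuming the extension is installed):
+```
+az machinelearningservices machine-learning-compute --help
+```
+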
+## COMMANDS
+### Commands in `az machinelearningservices machine-learning-compute` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices machine-learning-compute list](#MachineLearningComputeListByWorkspace)|ListByWorkspace|[Parameters](#ParametersMachineLearningComputeListByWorkspace)|[Example](#ExamplesMachineLearningComputeListByWorkspace)|
+|[az machinelearningservices machine-learning-compute show](#MachineLearningComputeGet)|Get|[Parameters](#ParametersMachineLearningComputeGet)|[Example](#ExamplesMachineLearningComputeGet)|
+|[az machinelearningservices machine-learning-compute aks create](#MachineLearningComputeCreateOrUpdate#Create#AKS)|CreateOrUpdate#Create#AKS|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#AKS)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#AKS)|
+|[az machinelearningservices machine-learning-compute aml-compute create](#MachineLearningComputeCreateOrUpdate#Create#AmlCompute)|CreateOrUpdate#Create#AmlCompute|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#AmlCompute)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#AmlCompute)|
+|[az machinelearningservices machine-learning-compute compute-instance create](#MachineLearningComputeCreateOrUpdate#Create#ComputeInstance)|CreateOrUpdate#Create#ComputeInstance|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#ComputeInstance)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#ComputeInstance)|
+|[az machinelearningservices machine-learning-compute data-factory create](#MachineLearningComputeCreateOrUpdate#Create#DataFactory)|CreateOrUpdate#Create#DataFactory|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#DataFactory)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#DataFactory)|
+|[az machinelearningservices machine-learning-compute data-lake-analytics create](#MachineLearningComputeCreateOrUpdate#Create#DataLakeAnalytics)|CreateOrUpdate#Create#DataLakeAnalytics|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#DataLakeAnalytics)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#DataLakeAnalytics)|
+|[az machinelearningservices machine-learning-compute databricks create](#MachineLearningComputeCreateOrUpdate#Create#Databricks)|CreateOrUpdate#Create#Databricks|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#Databricks)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#Databricks)|
+|[az machinelearningservices machine-learning-compute hd-insight create](#MachineLearningComputeCreateOrUpdate#Create#HDInsight)|CreateOrUpdate#Create#HDInsight|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#HDInsight)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#HDInsight)|
+|[az machinelearningservices machine-learning-compute synapse-spark create](#MachineLearningComputeCreateOrUpdate#Create#SynapseSpark)|CreateOrUpdate#Create#SynapseSpark|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#SynapseSpark)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#SynapseSpark)|
+|[az machinelearningservices machine-learning-compute virtual-machine create](#MachineLearningComputeCreateOrUpdate#Create#VirtualMachine)|CreateOrUpdate#Create#VirtualMachine|[Parameters](#ParametersMachineLearningComputeCreateOrUpdate#Create#VirtualMachine)|[Example](#ExamplesMachineLearningComputeCreateOrUpdate#Create#VirtualMachine)|
+|[az machinelearningservices machine-learning-compute update](#MachineLearningComputeUpdate)|Update|[Parameters](#ParametersMachineLearningComputeUpdate)|[Example](#ExamplesMachineLearningComputeUpdate)|
+|[az machinelearningservices machine-learning-compute delete](#MachineLearningComputeDelete)|Delete|[Parameters](#ParametersMachineLearningComputeDelete)|[Example](#ExamplesMachineLearningComputeDelete)|
+|[az machinelearningservices machine-learning-compute list-key](#MachineLearningComputeListKeys)|ListKeys|[Parameters](#ParametersMachineLearningComputeListKeys)|[Example](#ExamplesMachineLearningComputeListKeys)|
+|[az machinelearningservices machine-learning-compute list-node](#MachineLearningComputeListNodes)|ListNodes|[Parameters](#ParametersMachineLearningComputeListNodes)|[Example](#ExamplesMachineLearningComputeListNodes)|
+|[az machinelearningservices machine-learning-compute restart](#MachineLearningComputeRestart)|Restart|[Parameters](#ParametersMachineLearningComputeRestart)|[Example](#ExamplesMachineLearningComputeRestart)|
+|[az machinelearningservices machine-learning-compute start](#MachineLearningComputeStart)|Start|[Parameters](#ParametersMachineLearningComputeStart)|[Example](#ExamplesMachineLearningComputeStart)|
+|[az machinelearningservices machine-learning-compute stop](#MachineLearningComputeStop)|Stop|[Parameters](#ParametersMachineLearningComputeStop)|[Example](#ExamplesMachineLearningComputeStop)|
+
+### Commands in `az machinelearningservices machine-learning-service` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices machine-learning-service list](#MachineLearningServiceListByWorkspace)|ListByWorkspace|[Parameters](#ParametersMachineLearningServiceListByWorkspace)|[Example](#ExamplesMachineLearningServiceListByWorkspace)|
+|[az machinelearningservices machine-learning-service show](#MachineLearningServiceGet)|Get|[Parameters](#ParametersMachineLearningServiceGet)|[Example](#ExamplesMachineLearningServiceGet)|
+|[az machinelearningservices machine-learning-service create](#MachineLearningServiceCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersMachineLearningServiceCreateOrUpdate#Create)|[Example](#ExamplesMachineLearningServiceCreateOrUpdate#Create)|
+|[az machinelearningservices machine-learning-service update](#MachineLearningServiceCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersMachineLearningServiceCreateOrUpdate#Update)|Not Found|
+|[az machinelearningservices machine-learning-service delete](#MachineLearningServiceDelete)|Delete|[Parameters](#ParametersMachineLearningServiceDelete)|[Example](#ExamplesMachineLearningServiceDelete)|
+
+### Commands in `az machinelearningservices notebook` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices notebook list-key](#NotebooksListKeys)|ListKeys|[Parameters](#ParametersNotebooksListKeys)|[Example](#ExamplesNotebooksListKeys)|
+|[az machinelearningservices notebook prepare](#NotebooksPrepare)|Prepare|[Parameters](#ParametersNotebooksPrepare)|[Example](#ExamplesNotebooksPrepare)|
+
+### Commands in `az machinelearningservices private-endpoint-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices private-endpoint-connection show](#PrivateEndpointConnectionsGet)|Get|[Parameters](#ParametersPrivateEndpointConnectionsGet)|[Example](#ExamplesPrivateEndpointConnectionsGet)|
+|[az machinelearningservices private-endpoint-connection delete](#PrivateEndpointConnectionsDelete)|Delete|[Parameters](#ParametersPrivateEndpointConnectionsDelete)|[Example](#ExamplesPrivateEndpointConnectionsDelete)|
+|[az machinelearningservices private-endpoint-connection put](#PrivateEndpointConnectionsPut)|Put|[Parameters](#ParametersPrivateEndpointConnectionsPut)|[Example](#ExamplesPrivateEndpointConnectionsPut)|
+
+### Commands in `az machinelearningservices private-link-resource` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices private-link-resource list](#PrivateLinkResourcesListByWorkspace)|ListByWorkspace|[Parameters](#ParametersPrivateLinkResourcesListByWorkspace)|[Example](#ExamplesPrivateLinkResourcesListByWorkspace)|
+
+### Commands in `az machinelearningservices quota` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices quota list](#QuotasList)|List|[Parameters](#ParametersQuotasList)|[Example](#ExamplesQuotasList)|
+|[az machinelearningservices quota update](#QuotasUpdate)|Update|[Parameters](#ParametersQuotasUpdate)|[Example](#ExamplesQuotasUpdate)|
+
+### Commands in `az machinelearningservices storage-account` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices storage-account list-key](#StorageAccountListKeys)|ListKeys|[Parameters](#ParametersStorageAccountListKeys)|[Example](#ExamplesStorageAccountListKeys)|
+
+### Commands in `az machinelearningservices usage` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices usage list](#UsagesList)|List|[Parameters](#ParametersUsagesList)|[Example](#ExamplesUsagesList)|
+
+### Commands in `az machinelearningservices virtual-machine-size` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices virtual-machine-size list](#VirtualMachineSizesList)|List|[Parameters](#ParametersVirtualMachineSizesList)|[Example](#ExamplesVirtualMachineSizesList)|
+
+### Commands in `az machinelearningservices workspace` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace list](#WorkspacesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersWorkspacesListByResourceGroup)|[Example](#ExamplesWorkspacesListByResourceGroup)|
+|[az machinelearningservices workspace list](#WorkspacesListBySubscription)|ListBySubscription|[Parameters](#ParametersWorkspacesListBySubscription)|[Example](#ExamplesWorkspacesListBySubscription)|
+|[az machinelearningservices workspace show](#WorkspacesGet)|Get|[Parameters](#ParametersWorkspacesGet)|[Example](#ExamplesWorkspacesGet)|
+|[az machinelearningservices workspace create](#WorkspacesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersWorkspacesCreateOrUpdate#Create)|[Example](#ExamplesWorkspacesCreateOrUpdate#Create)|
+|[az machinelearningservices workspace update](#WorkspacesUpdate)|Update|[Parameters](#ParametersWorkspacesUpdate)|[Example](#ExamplesWorkspacesUpdate)|
+|[az machinelearningservices workspace delete](#WorkspacesDelete)|Delete|[Parameters](#ParametersWorkspacesDelete)|[Example](#ExamplesWorkspacesDelete)|
+|[az machinelearningservices workspace list-key](#WorkspacesListKeys)|ListKeys|[Parameters](#ParametersWorkspacesListKeys)|[Example](#ExamplesWorkspacesListKeys)|
+|[az machinelearningservices workspace list-notebook-access-token](#WorkspacesListNotebookAccessToken)|ListNotebookAccessToken|[Parameters](#ParametersWorkspacesListNotebookAccessToken)|[Example](#ExamplesWorkspacesListNotebookAccessToken)|
+|[az machinelearningservices workspace resync-key](#WorkspacesResyncKeys)|ResyncKeys|[Parameters](#ParametersWorkspacesResyncKeys)|[Example](#ExamplesWorkspacesResyncKeys)|
+
+### Commands in `az machinelearningservices workspace` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace list-sku](#WorkspaceListSkus)|ListSkus|[Parameters](#ParametersWorkspaceListSkus)|[Example](#ExamplesWorkspaceListSkus)|
+
+### Commands in `az machinelearningservices workspace-connection` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-connection list](#WorkspaceConnectionsList)|List|[Parameters](#ParametersWorkspaceConnectionsList)|[Example](#ExamplesWorkspaceConnectionsList)|
+|[az machinelearningservices workspace-connection show](#WorkspaceConnectionsGet)|Get|[Parameters](#ParametersWorkspaceConnectionsGet)|[Example](#ExamplesWorkspaceConnectionsGet)|
+|[az machinelearningservices workspace-connection create](#WorkspaceConnectionsCreate)|Create|[Parameters](#ParametersWorkspaceConnectionsCreate)|[Example](#ExamplesWorkspaceConnectionsCreate)|
+|[az machinelearningservices workspace-connection delete](#WorkspaceConnectionsDelete)|Delete|[Parameters](#ParametersWorkspaceConnectionsDelete)|[Example](#ExamplesWorkspaceConnectionsDelete)|
+
+### Commands in `az machinelearningservices workspace-feature` group
+|CLI Command|Operation Swagger name|Parameters|Examples|
+|---------|------------|--------|-----------|
+|[az machinelearningservices workspace-feature list](#WorkspaceFeaturesList)|List|[Parameters](#ParametersWorkspaceFeaturesList)|[Example](#ExamplesWorkspaceFeaturesList)|
+
+
+## COMMAND DETAILS
+
+### group `az machinelearningservices machine-learning-compute`
+#### Command `az machinelearningservices machine-learning-compute list`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
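+
+If a listing is truncated, the service returns a continuation token that can be passed back through `--skip` to fetch the next page (placeholder token shown):
+```
+az machinelearningservices machine-learning-compute list --resource-group "testrg123" \
+--workspace-name "workspaces123" --skip "<continuation-token>"
+```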
+
+#### Command `az machinelearningservices machine-learning-compute show`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute aks create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--ak-s-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remoteLogin\
+PortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBe\
+foreScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-000000000000\
+/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/versions/\
+0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--ak-s-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"personal\\"\
+,\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-000000000000\\",\
+\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\\
+"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" \
+--ak-s-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--ak-s-compute-location**|string|Location for the underlying compute|ak_s_compute_location|computeLocation|
+|**--ak-s-description**|string|The description of the Machine Learning compute.|ak_s_description|description|
+|**--ak-s-resource-id**|string|ARM resource id of the underlying compute|ak_s_resource_id|resourceId|
+|**--ak-s-disable-local-auth**|boolean|Opt-out of local authentication and ensure customers can use only MSI and AAD exclusively for authentication.|ak_s_disable_local_auth|disableLocalAuth|
+|**--ak-s-properties**|object|AKS properties|ak_s_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute aml-compute create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--aml-compute-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\\",\\"remo\
+teLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdl\
+eTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000-0000-00000\
+0000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDefinition/ve\
+rsions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--aml-compute-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\\":\\"pers\
+onal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000-0000000000\
+00\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\
+\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" --location "eastus" \
+--aml-compute-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|aml_compute_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|aml_compute_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|aml_compute_resource_id|resourceId|
+|**--disable-local-auth**|boolean|Opt-out of local authentication and ensure customers can use only MSI and AAD exclusively for authentication.|aml_compute_disable_local_auth|disableLocalAuth|
+|**--aml-compute-properties**|object|AML Compute properties|aml_compute_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute compute-instance create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --compute-instance-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Wind\
+ows\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\\
+":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0\
+000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImag\
+eDefinition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --compute-instance-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationT\
+ype\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0\
+000-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\\
+":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" --location \
+"eastus" --compute-instance-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|compute_instance_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|compute_instance_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|compute_instance_resource_id|resourceId|
+|**--disable-local-auth**|boolean|Opt-out of local authentication and ensure customers can use only MSI and AAD exclusively for authentication.|compute_instance_disable_local_auth|disableLocalAuth|
+|**--compute-instance-properties**|object|Compute Instance properties|compute_instance_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute data-factory create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|data_factory_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|data_factory_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|data_factory_resource_id|resourceId|
+|**--disable-local-auth**|boolean|Opt-out of local authentication and ensure customers can use only MSI and AAD exclusively for authentication.|data_factory_disable_local_auth|disableLocalAuth|
+
+#### Command `az machinelearningservices machine-learning-compute data-lake-analytics create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|data_lake_analytics_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|data_lake_analytics_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|data_lake_analytics_resource_id|resourceId|
+|**--disable-local-auth**|boolean|Opt-out of local authentication and ensure customers can use only MSI and AAD exclusively for authentication.|data_lake_analytics_disable_local_auth|disableLocalAuth|
+|**--data-lake-store-account-name**|string|DataLake Store Account Name|data_lake_analytics_data_lake_store_account_name|dataLakeStoreAccountName|
+
+#### Command `az machinelearningservices machine-learning-compute databricks create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|databricks_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|databricks_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|databricks_resource_id|resourceId|
+|**--disable-local-auth**|boolean|Opt-out of local authentication and ensure customers can use only MSI and AAD exclusively for authentication.|databricks_disable_local_auth|disableLocalAuth|
+|**--databricks-access-token**|string|Databricks access token|databricks_databricks_access_token|databricksAccessToken|
+|**--workspace-url**|string|Workspace Url|databricks_workspace_url|workspaceUrl|
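+
+Since Databricks compute attaches an existing Azure Databricks workspace, the token and URL of that workspace are passed directly (placeholder values shown):
+```
+az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \
+--location "eastus" --databricks-access-token "<databricks-personal-access-token>" \
+--workspace-url "<databricks-workspace-url>" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```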
+
+#### Command `az machinelearningservices machine-learning-compute hd-insight create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|hd_insight_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|hd_insight_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|hd_insight_resource_id|resourceId|
+|**--disable-local-auth**|boolean|Opt-out of local authentication and ensure customers can use only MSI and AAD for authentication.|hd_insight_disable_local_auth|disableLocalAuth|
+|**--ssh-port**|integer|Port open for ssh connections on the master node of the cluster.|hd_insight_ssh_port|sshPort|
+|**--address**|string|Public IP address of the master node of the cluster.|hd_insight_address|address|
+|**--administrator-account**|object|Admin credentials for master node of the cluster|hd_insight_administrator_account|administratorAccount|
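+
+A hedged sketch that also sets the HDInsight-specific options documented above; the SSH port and master-node address are illustrative placeholders:
+```
+az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" --location "eastus" \
+--ssh-port 22 --address "10.0.0.4" --resource-group "testrg123" --workspace-name "workspaces123"
+```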
+
+#### Command `az machinelearningservices machine-learning-compute synapse-spark create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" --location \
+"eastus" --synapse-spark-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windows\
+\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0\
+,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-0000\
+-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImageDe\
+finition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" --location \
+"eastus" --synapse-spark-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationType\
+\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-0000\
+-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\":\
+\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute synapse-spark create --compute-name "compute123" --location \
+"eastus" --synapse-spark-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name \
+"workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|synapse_spark_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|synapse_spark_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|synapse_spark_resource_id|resourceId|
+|**--disable-local-auth**|boolean|Opt-out of local authentication and ensure customers can use only MSI and AAD for authentication.|synapse_spark_disable_local_auth|disableLocalAuth|
+|**--synapse-spark-properties**|object|Synapse Spark properties|synapse_spark_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute virtual-machine create`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --location \
+"eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --location \
+"eastus" --virtual-machine-properties "{\\"enableNodePublicIp\\":true,\\"isolatedNetwork\\":false,\\"osType\\":\\"Windo\
+ws\\",\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\"\
+:0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"virtualMachineImage\\":{\\"id\\":\\"/subscriptions/00000000-0000-00\
+00-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Compute/galleries/myImageGallery/images/myImage\
+Definition/versions/0.0.1\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --location \
+"eastus" --virtual-machine-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"computeInstanceAuthorizationTy\
+pe\\":\\"personal\\",\\"personalComputeInstanceSettings\\":{\\"assignedUser\\":{\\"objectId\\":\\"00000000-0000-0000-00\
+00-000000000000\\",\\"tenantId\\":\\"00000000-0000-0000-0000-000000000000\\"}},\\"sshSettings\\":{\\"sshPublicAccess\\"\
+:\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+```
+##### Example
+```
+az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" --location \
+"eastus" --virtual-machine-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--compute-location**|string|Location for the underlying compute|virtual_machine_compute_location|computeLocation|
+|**--description**|string|The description of the Machine Learning compute.|virtual_machine_description|description|
+|**--resource-id**|string|ARM resource id of the underlying compute|virtual_machine_resource_id|resourceId|
+|**--disable-local-auth**|boolean|Opt-out of local authentication and ensure customers can use only MSI and AAD for authentication.|virtual_machine_disable_local_auth|disableLocalAuth|
+|**--virtual-machine-properties**|object|Virtual Machine properties|virtual_machine_properties|properties|
+
+#### Command `az machinelearningservices machine-learning-compute update`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute update --compute-name "compute123" --scale-settings \
+max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--scale-settings**|object|Desired scale settings for the amlCompute.|scale_settings|scaleSettings|
+
+#### Command `az machinelearningservices machine-learning-compute delete`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group "testrg123" \
+--underlying-resource-action "Delete" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+|**--underlying-resource-action**|choice|Delete the underlying compute if 'Delete', or detach the underlying compute from workspace if 'Detach'.|underlying_resource_action|underlyingResourceAction|
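+
+A hedged variant that detaches the underlying compute from the workspace instead of deleting it, using the other value of the `--underlying-resource-action` choice documented above:
+```
+az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group "testrg123" \
+--underlying-resource-action "Detach" --workspace-name "workspaces123"
+```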
+
+#### Command `az machinelearningservices machine-learning-compute list-key`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute list-key --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute list-node`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute list-node --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute restart`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute restart --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute start`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute start --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+#### Command `az machinelearningservices machine-learning-compute stop`
+
+##### Example
+```
+az machinelearningservices machine-learning-compute stop --compute-name "compute123" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName|
+
+### group `az machinelearningservices machine-learning-service`
+#### Command `az machinelearningservices machine-learning-service list`
+
+##### Example
+```
+az machinelearningservices machine-learning-service list --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+|**--model-id**|string|The Model Id.|model_id|modelId|
+|**--model-name**|string|The Model name.|model_name|modelName|
+|**--tag**|string|The object tag.|tag|tag|
+|**--tags**|string|A set of tags with which to filter the returned services. A comma-separated string of tag keys or key=value pairs, for example: tagKey1,tagKey2,tagKey3=value3.|tags|tags|
+|**--properties**|string|A set of properties with which to filter the returned services. A comma-separated string of property keys and/or key=value pairs, for example: propKey1,propKey2,propKey3=value3.|properties|properties|
+|**--run-id**|string|runId for model associated with service.|run_id|runId|
+|**--expand**|boolean|Set to True to include Model details.|expand|expand|
+|**--orderby**|choice|The option to order the response.|orderby|orderby|
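+
+A hedged sketch that exercises the filter options documented above; the model name and tag values are illustrative placeholders:
+```
+az machinelearningservices machine-learning-service list --resource-group "testrg123" --workspace-name "workspaces123" \
+--model-name "sklearn_regression_model.pkl" --tags "tagKey1,tagKey2=value2" --expand true
+```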
+
+#### Command `az machinelearningservices machine-learning-service show`
+
+##### Example
+```
+az machinelearningservices machine-learning-service show --resource-group "testrg123" --service-name "service123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--service-name**|string|Name of the Azure Machine Learning service.|service_name|serviceName|
+|**--expand**|boolean|Set to True to include Model details.|expand|expand|
+
+#### Command `az machinelearningservices machine-learning-service create`
+
+##### Example
+```
+az machinelearningservices machine-learning-service create --properties "{\\"appInsightsEnabled\\":true,\\"authEnabled\
+\\":true,\\"computeType\\":\\"ACI\\",\\"containerResourceRequirements\\":{\\"cpu\\":1,\\"memoryInGB\\":1},\\"environmen\
+tImageRequest\\":{\\"assets\\":[{\\"id\\":null,\\"mimeType\\":\\"application/x-python\\",\\"unpack\\":false,\\"url\\":\
+\\"aml://storage/azureml/score.py\\"}],\\"driverProgram\\":\\"score.py\\",\\"environment\\":{\\"name\\":\\"AzureML-Scik\
+it-learn-0.20.3\\",\\"docker\\":{\\"baseDockerfile\\":null,\\"baseImage\\":\\"mcr.microsoft.com/azureml/base:openmpi3.1\
+.2-ubuntu16.04\\",\\"baseImageRegistry\\":{\\"address\\":null,\\"password\\":null,\\"username\\":null}},\\"environmentV\
+ariables\\":{\\"EXAMPLE_ENV_VAR\\":\\"EXAMPLE_VALUE\\"},\\"inferencingStackVersion\\":null,\\"python\\":{\\"baseCondaEn\
+vironment\\":null,\\"condaDependencies\\":{\\"name\\":\\"azureml_ae1acbe6e1e6aabbad900b53c491a17c\\",\\"channels\\":[\\\
+"conda-forge\\"],\\"dependencies\\":[\\"python=3.6.2\\",{\\"pip\\":[\\"azureml-core==1.0.69\\",\\"azureml-defaults==1.0\
+.69\\",\\"azureml-telemetry==1.0.69\\",\\"azureml-train-restclients-hyperdrive==1.0.69\\",\\"azureml-train-core==1.0.69\
+\\",\\"scikit-learn==0.20.3\\",\\"scipy==1.2.1\\",\\"numpy==1.16.2\\",\\"joblib==0.13.2\\"]}]},\\"interpreterPath\\":\\\
+"python\\",\\"userManagedDependencies\\":false},\\"spark\\":{\\"packages\\":[],\\"precachePackages\\":true,\\"repositor\
+ies\\":[]},\\"version\\":\\"3\\"},\\"models\\":[{\\"name\\":\\"sklearn_regression_model.pkl\\",\\"mimeType\\":\\"applic\
+ation/x-python\\",\\"url\\":\\"aml://storage/azureml/sklearn_regression_model.pkl\\"}]},\\"location\\":\\"eastus2\\"}" \
+--resource-group "testrg123" --service-name "service456" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--service-name**|string|Name of the Azure Machine Learning service.|service_name|serviceName|
+|**--properties**|object|The payload that is used to create or update the Service.|properties|properties|
+
+#### Command `az machinelearningservices machine-learning-service update`
+
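+##### Example
+A hedged sketch, since no recorded example exists for this command; the properties payload is an illustrative minimal one, not a validated request body:
+```
+az machinelearningservices machine-learning-service update --properties "{\"computeType\":\"ACI\",\"appInsightsEnabled\":false}" \
+--resource-group "testrg123" --service-name "service123" --workspace-name "workspaces123"
+```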
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--service-name**|string|Name of the Azure Machine Learning service.|service_name|serviceName|
+|**--properties**|object|The payload that is used to create or update the Service.|properties|properties|
+
+#### Command `az machinelearningservices machine-learning-service delete`
+
+##### Example
+```
+az machinelearningservices machine-learning-service delete --resource-group "testrg123" --service-name "service123" \
+--workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--service-name**|string|Name of the Azure Machine Learning service.|service_name|serviceName|
+
+### group `az machinelearningservices notebook`
+#### Command `az machinelearningservices notebook list-key`
+
+##### Example
+```
+az machinelearningservices notebook list-key --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices notebook prepare`
+
+##### Example
+```
+az machinelearningservices notebook prepare --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices private-endpoint-connection`
+#### Command `az machinelearningservices private-endpoint-connection show`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" --resource-group \
+"rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+
+#### Command `az machinelearningservices private-endpoint-connection delete`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \
+--resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+
+#### Command `az machinelearningservices private-endpoint-connection put`
+
+##### Example
+```
+az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \
+--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \
+--workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--private-link-service-connection-state**|object|A collection of information about the state of the connection between service consumer and provider.|private_link_service_connection_state|privateLinkServiceConnectionState|
+
+### group `az machinelearningservices private-link-resource`
+#### Command `az machinelearningservices private-link-resource list`
+
+##### Example
+```
+az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices quota`
+#### Command `az machinelearningservices quota list`
+
+##### Example
+```
+az machinelearningservices quota list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+#### Command `az machinelearningservices quota update`
+
+##### Example
+```
+az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServices/workspaces/\
+quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningSe\
+rvices/workspaces/demo_workspace1/quotas/Standard_DSv2_Family_Cluster_Dedicated_vCPUs" limit=100 unit="Count" --value \
+type="Microsoft.MachineLearningServices/workspaces/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/reso\
+urceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/Standard_DSv2_Family_Cluste\
+r_Dedicated_vCPUs" limit=200 unit="Count"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which the quota update is queried.|location|location|
+|**--value**|array|The list for update quota.|value|value|
+|**--quota-update-parameters-location**|string|Region of workspace quota to be updated.|quota_update_parameters_location|location|
+
+### group `az machinelearningservices storage-account`
+#### Command `az machinelearningservices storage-account list-key`
+
+##### Example
+```
+az machinelearningservices storage-account list-key --resource-group "testrg123" --workspace-name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices usage`
+#### Command `az machinelearningservices usage list`
+
+##### Example
+```
+az machinelearningservices usage list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+### group `az machinelearningservices virtual-machine-size`
+#### Command `az machinelearningservices virtual-machine-size list`
+
+##### Example
+```
+az machinelearningservices virtual-machine-size list --location "eastus"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which virtual machine sizes are queried.|location|location|
+
+### group `az machinelearningservices workspace`
+#### Command `az machinelearningservices workspace list`
+
+##### Example
+```
+az machinelearningservices workspace list --resource-group "workspace-1234"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--skip**|string|Continuation token for pagination.|skip|$skip|
+
+#### Command `az machinelearningservices workspace list`
+
+##### Example
+```
+az machinelearningservices workspace list
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+
+#### Command `az machinelearningservices workspace show`
+
+##### Example
+```
+az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace create`
+
+##### Example
+```
+az machinelearningservices workspace create --identity type="SystemAssigned,UserAssigned" \
+userAssignedIdentities={"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mi\
+crosoft.ManagedIdentity/userAssignedIdentities/testuai":{}} --location "eastus2euap" --description "test description" \
+--application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/mic\
+rosoft.insights/components/testinsights" --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/reso\
+urceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" --identity \
+user-assigned-identity="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Mic\
+rosoft.ManagedIdentity/userAssignedIdentities/testuai" --key-vault-properties identity-client-id="" \
+key-identifier="https://testkv.vault.azure.net/keys/testkey/aabbccddee112233445566778899aabb" \
+key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft\
+.KeyVault/vaults/testkv" --status "Enabled" --friendly-name "HelloName" --hbi-workspace false --key-vault \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.KeyVault/vaults/\
+testkv" --shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/00000000-1111-22\
+22-3333-444444444444/resourceGroups/workspace-1234/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/priva\
+teLinkResources/Sql" group-id="Sql" request-message="Please approve" status="Approved" --storage-account \
+"/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accountcrud-1234/providers/Microsoft.Storage/storag\
+eAccounts/testStorageAccount" --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace. This name is mutable.|friendly_name|friendlyName|
+|**--key-vault**|string|ARM id of the key vault associated with this workspace. This cannot be changed once the workspace has been created|key_vault|keyVault|
+|**--application-insights**|string|ARM id of the application insights associated with this workspace. This cannot be changed once the workspace has been created|application_insights|applicationInsights|
+|**--container-registry**|string|ARM id of the container registry associated with this workspace. This cannot be changed once the workspace has been created|container_registry|containerRegistry|
+|**--storage-account**|string|ARM id of the storage account associated with this workspace. This cannot be changed once the workspace has been created|storage_account|storageAccount|
+|**--discovery-url**|string|Url for the discovery service to identify regional endpoints for machine learning experimentation services|discovery_url|discoveryUrl|
+|**--hbi-workspace**|boolean|The flag to signal HBI data in the workspace and reduce diagnostic data collected by the service|hbi_workspace|hbiWorkspace|
+|**--image-build-compute**|string|The compute name for image build|image_build_compute|imageBuildCompute|
+|**--allow-public-access-when-behind-vnet**|boolean|The flag to indicate whether to allow public access when behind VNet.|allow_public_access_when_behind_vnet|allowPublicAccessWhenBehindVnet|
+|**--shared-private-link-resources**|array|The list of shared private link resources in this workspace.|shared_private_link_resources|sharedPrivateLinkResources|
+|**--primary-user-assigned-identity**|string|The user assigned identity resource id that represents the workspace identity.|primary_user_assigned_identity|primaryUserAssignedIdentity|
+|**--collections-throughput**|integer|The throughput of the collections in cosmosdb database|collections_throughput|collectionsThroughput|
+|**--status**|choice|Indicates whether encryption is enabled for the workspace.|status|status|
+|**--identity**|object|The identity that will be used to access the key vault for encryption at rest.|identity|identity|
+|**--key-vault-properties**|object|Customer Key vault properties.|key_vault_properties|keyVaultProperties|
+
+#### Command `az machinelearningservices workspace update`
+
+##### Example
+```
+az machinelearningservices workspace update --description "new description" --friendly-name "New friendly name" \
+--resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--tags**|dictionary|The resource tags for the machine learning workspace.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace.|friendly_name|friendlyName|
+|**--image-build-compute**|string|The compute name for image build|image_build_compute|imageBuildCompute|
+|**--primary-user-assigned-identity**|string|The user assigned identity resource id that represents the workspace identity.|primary_user_assigned_identity|primaryUserAssignedIdentity|
+|**--collections-throughput**|integer|The throughput of the collections in cosmosdb database|collections_throughput|collectionsThroughput|
+|**--type**|sealed-choice|The identity type.|type|type|
+|**--user-assigned-identities**|dictionary|The user assigned identities associated with the resource.|user_assigned_identities|userAssignedIdentities|
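+
+A hedged sketch that also updates tags and the image-build compute using the options documented above; the tag and compute names are illustrative placeholders:
+```
+az machinelearningservices workspace update --tags environment="dev" team="ml" --image-build-compute "cpu-cluster" \
+--resource-group "workspace-1234" --name "testworkspace"
+```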
+
+#### Command `az machinelearningservices workspace delete`
+
+##### Example
+```
+az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-key`
+
+##### Example
+```
+az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace list-notebook-access-token`
+
+##### Example
+```
+az machinelearningservices workspace list-notebook-access-token --resource-group "workspace-1234" --name \
+"testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+#### Command `az machinelearningservices workspace resync-key`
+
+##### Example
+```
+az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### group `az machinelearningservices workspace`
+#### Command `az machinelearningservices workspace list-sku`
+
+##### Example
+```
+az machinelearningservices workspace list-sku
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+
+### group `az machinelearningservices workspace-connection`
+#### Command `az machinelearningservices workspace-connection list`
+
+##### Example
+```
+az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" --target \
+"www.facebook.com" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--target**|string|Target of the workspace connection.|target|target|
+|**--category**|string|Category of the workspace connection.|category|category|
+
+#### Command `az machinelearningservices workspace-connection show`
+
+##### Example
+```
+az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+
+#### Command `az machinelearningservices workspace-connection create`
+
+##### Example
+```
+az machinelearningservices workspace-connection create --connection-name "connection-1" --name "connection-1" \
+--auth-type "PAT" --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group "resourceGroup-1" \
+--workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+|**--name**|string|Friendly name of the workspace connection|name|name|
+|**--category**|string|Category of the workspace connection.|category|category|
+|**--target**|string|Target of the workspace connection.|target|target|
+|**--auth-type**|string|Authorization type of the workspace connection.|auth_type|authType|
+|**--value**|string|Value details of the workspace connection.|value|value|
+
+#### Command `az machinelearningservices workspace-connection delete`
+
+##### Example
+```
+az machinelearningservices workspace-connection delete --connection-name "connection-1" --resource-group \
+"resourceGroup-1" --workspace-name "workspace-1"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName|
+
+### group `az machinelearningservices workspace-feature`
+#### Command `az machinelearningservices workspace-feature list`
+
+##### Example
+```
+az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+```
+##### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
diff --git a/src/machinelearningservices/setup.cfg b/src/machinelearningservices/setup.cfg
new file mode 100644
index 00000000000..2fdd96e5d39
--- /dev/null
+++ b/src/machinelearningservices/setup.cfg
@@ -0,0 +1 @@
+#setup.cfg
\ No newline at end of file
diff --git a/src/machinelearningservices/setup.py b/src/machinelearningservices/setup.py
new file mode 100644
index 00000000000..e4ec7166802
--- /dev/null
+++ b/src/machinelearningservices/setup.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# --------------------------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# --------------------------------------------------------------------------------------------
+
+
+from codecs import open
+from setuptools import setup, find_packages
+
+# HISTORY.rst entry.
+VERSION = '0.1.0'
+try:
+ from azext_machinelearningservices.manual.version import VERSION
+except ImportError:
+ pass
+
+# The full list of classifiers is available at
+# https://pypi.python.org/pypi?%3Aaction=list_classifiers
+CLASSIFIERS = [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'Intended Audience :: System Administrators',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Programming Language :: Python :: 3.8',
+ 'License :: OSI Approved :: MIT License',
+]
+
+DEPENDENCIES = []
+
+try:
+ from azext_machinelearningservices.manual.dependency import DEPENDENCIES
+except ImportError:
+ pass
+
+with open('README.md', 'r', encoding='utf-8') as f:
+ README = f.read()
+with open('HISTORY.rst', 'r', encoding='utf-8') as f:
+ HISTORY = f.read()
+
+setup(
+ name='machinelearningservices',
+ version=VERSION,
+ description='Microsoft Azure Command-Line Tools AzureMachineLearningWorkspaces Extension',
+ author='Microsoft Corporation',
+ author_email='azpycli@microsoft.com',
+ url='https://github.com/Azure/azure-cli-extensions/tree/master/src/machinelearningservices',
+ long_description=README + '\n\n' + HISTORY,
+ license='MIT',
+ classifiers=CLASSIFIERS,
+ packages=find_packages(),
+ install_requires=DEPENDENCIES,
+ package_data={'azext_machinelearningservices': ['azext_metadata.json']},
+)