From af3c51bf03e06ba61d81b818f59e7da282332d9e Mon Sep 17 00:00:00 2001 From: SDK Automation Date: Wed, 23 Sep 2020 06:06:57 +0000 Subject: [PATCH] Generated from 6dfc7a9ea07381a2e1d8641548bc5c29ac10c56f --- src/machinelearningservices/HISTORY.rst | 8 + src/machinelearningservices/README.md | 5 + .../azext_machinelearningservices/__init__.py | 50 + .../azext_machinelearningservices/action.py | 17 + .../azext_metadata.json | 4 + .../azext_machinelearningservices/custom.py | 17 + .../generated/__init__.py | 12 + .../generated/_client_factory.py | 56 + .../generated/_help.py | 1002 +++++ .../generated/_params.py | 433 ++ .../generated/_validators.py | 9 + .../generated/action.py | 228 + .../generated/commands.py | 162 + .../generated/custom.py | 617 +++ .../manual/__init__.py | 12 + .../tests/__init__.py | 114 + .../tests/latest/__init__.py | 12 + .../test_machinelearningservices_scenario.py | 1356 ++++++ .../vendored_sdks/__init__.py | 12 + .../machinelearningservices/__init__.py | 16 + .../_azure_machine_learning_workspaces.py | 120 + .../machinelearningservices/_configuration.py | 71 + .../machinelearningservices/aio/__init__.py | 10 + ...azure_machine_learning_workspaces_async.py | 114 + .../aio/_configuration_async.py | 67 + .../aio/operations_async/__init__.py | 35 + ...ne_learning_workspaces_operations_async.py | 86 + ...chine_learning_compute_operations_async.py | 876 ++++ .../_notebook_operations_async.py | 151 + .../_operation_operations_async.py | 102 + ...te_endpoint_connection_operations_async.py | 286 ++ ..._private_link_resource_operations_async.py | 96 + .../_quota_operations_async.py | 172 + .../_usage_operations_async.py | 110 + .../_virtual_machine_size_operations_async.py | 92 + .../_workspace_connection_operations_async.py | 323 ++ .../_workspace_feature_operations_async.py | 114 + .../_workspace_operations_async.py | 746 ++++ .../models/__init__.py | 303 ++ ...azure_machine_learning_workspaces_enums.py | 215 + .../machinelearningservices/models/_models.py | 3430 +++++++++++++++ .../models/_models_py3.py | 3682 +++++++++++++++++ .../operations/__init__.py | 35 + ..._machine_learning_workspaces_operations.py | 91 + .../_machine_learning_compute_operations.py | 893 ++++ .../operations/_notebook_operations.py | 157 + .../operations/_operation_operations.py | 107 + ..._private_endpoint_connection_operations.py | 294 ++ .../_private_link_resource_operations.py | 101 + .../operations/_quota_operations.py | 178 + .../operations/_usage_operations.py | 115 + .../_virtual_machine_size_operations.py | 97 + .../_workspace_connection_operations.py | 331 ++ .../_workspace_feature_operations.py | 119 + .../operations/_workspace_operations.py | 760 ++++ .../machinelearningservices/py.typed | 1 + src/machinelearningservices/report.md | 911 ++++ src/machinelearningservices/setup.cfg | 1 + src/machinelearningservices/setup.py | 57 + 59 files changed, 19591 insertions(+) create mode 100644 src/machinelearningservices/HISTORY.rst create mode 100644 src/machinelearningservices/README.md create mode 100644 src/machinelearningservices/azext_machinelearningservices/__init__.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/action.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/azext_metadata.json create mode 100644 src/machinelearningservices/azext_machinelearningservices/custom.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/generated/__init__.py create mode 100644 
src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/generated/_help.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/generated/_params.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/generated/_validators.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/generated/action.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/generated/commands.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/generated/custom.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/manual/__init__.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/tests/__init__.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/__init__.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_azure_machine_learning_workspaces_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_machine_learning_compute_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_notebook_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_operation_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_private_endpoint_connection_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_private_link_resource_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_quota_operations_async.py create mode 100644 
src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_usage_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_virtual_machine_size_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_connection_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_feature_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_operations_async.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebook_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operation_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connection_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resource_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quota_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usage_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_size_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connection_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_feature_operations.py create mode 100644 src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_operations.py create mode 100644 
src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed create mode 100644 src/machinelearningservices/report.md create mode 100644 src/machinelearningservices/setup.cfg create mode 100644 src/machinelearningservices/setup.py diff --git a/src/machinelearningservices/HISTORY.rst b/src/machinelearningservices/HISTORY.rst new file mode 100644 index 00000000000..1c139576ba0 --- /dev/null +++ b/src/machinelearningservices/HISTORY.rst @@ -0,0 +1,8 @@ +.. :changelog: + +Release History +=============== + +0.1.0 +++++++ +* Initial release. diff --git a/src/machinelearningservices/README.md b/src/machinelearningservices/README.md new file mode 100644 index 00000000000..649c4148bdc --- /dev/null +++ b/src/machinelearningservices/README.md @@ -0,0 +1,5 @@ +Microsoft Azure CLI 'machinelearningservices' Extension +========================================== + +This package is for the 'machinelearningservices' extension. +i.e. 'az machinelearningservices' diff --git a/src/machinelearningservices/azext_machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/__init__.py new file mode 100644 index 00000000000..b234b2a3aa6 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/__init__.py @@ -0,0 +1,50 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +from azure.cli.core import AzCommandsLoader +from azext_machinelearningservices.generated._help import helps # pylint: disable=unused-import +try: + from azext_machinelearningservices.manual._help import helps # pylint: disable=reimported +except ImportError: + pass + + +class AzureMachineLearningWorkspacesCommandsLoader(AzCommandsLoader): + + def __init__(self, cli_ctx=None): + from azure.cli.core.commands import CliCommandType + from azext_machinelearningservices.generated._client_factory import cf_machinelearningservices_cl + machinelearningservices_custom = CliCommandType( + operations_tmpl='azext_machinelearningservices.custom#{}', + client_factory=cf_machinelearningservices_cl) + parent = super(AzureMachineLearningWorkspacesCommandsLoader, self) + parent.__init__(cli_ctx=cli_ctx, custom_command_type=machinelearningservices_custom) + + def load_command_table(self, args): + from azext_machinelearningservices.generated.commands import load_command_table + load_command_table(self, args) + try: + from azext_machinelearningservices.manual.commands import load_command_table as load_command_table_manual + load_command_table_manual(self, args) + except ImportError: + pass + return self.command_table + + def load_arguments(self, command): + from azext_machinelearningservices.generated._params import load_arguments + load_arguments(self, command) + try: + from azext_machinelearningservices.manual._params import load_arguments as load_arguments_manual + load_arguments_manual(self, command) + except ImportError: + pass + + +COMMAND_LOADER_CLS = AzureMachineLearningWorkspacesCommandsLoader diff --git a/src/machinelearningservices/azext_machinelearningservices/action.py b/src/machinelearningservices/azext_machinelearningservices/action.py new 
file mode 100644 index 00000000000..d95d53bf711 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/action.py @@ -0,0 +1,17 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import + +from .generated.action import * # noqa: F403 +try: + from .manual.action import * # noqa: F403 +except ImportError: + pass diff --git a/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json new file mode 100644 index 00000000000..4f48fa652a5 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/azext_metadata.json @@ -0,0 +1,4 @@ +{ + "azext.isExperimental": true, + "azext.minCliCoreVersion": "2.11.0" +} \ No newline at end of file diff --git a/src/machinelearningservices/azext_machinelearningservices/custom.py b/src/machinelearningservices/azext_machinelearningservices/custom.py new file mode 100644 index 00000000000..dbe9d5f9742 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/custom.py @@ -0,0 +1,17 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wildcard-import +# pylint: disable=unused-wildcard-import + +from .generated.custom import * # noqa: F403 +try: + from .manual.custom import * # noqa: F403 +except ImportError: + pass diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/generated/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py new file mode 100644 index 00000000000..a170ab6c44a --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/generated/_client_factory.py @@ -0,0 +1,56 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + + +def cf_machinelearningservices_cl(cli_ctx, *_): + from azure.cli.core.commands.client_factory import get_mgmt_service_client + from ..vendored_sdks.machinelearningservices import AzureMachineLearningWorkspaces + return get_mgmt_service_client(cli_ctx, + AzureMachineLearningWorkspaces) + + +def cf_workspace(cli_ctx, *_): + return cf_machinelearningservices_cl(cli_ctx).workspace + + +def cf_workspace_feature(cli_ctx, *_): + return cf_machinelearningservices_cl(cli_ctx).workspace_feature + + +def cf_notebook(cli_ctx, *_): + return cf_machinelearningservices_cl(cli_ctx).notebook + + +def cf_usage(cli_ctx, *_): + return cf_machinelearningservices_cl(cli_ctx).usage + + +def cf_virtual_machine_size(cli_ctx, *_): + return cf_machinelearningservices_cl(cli_ctx).virtual_machine_size + + +def cf_quota(cli_ctx, *_): + return cf_machinelearningservices_cl(cli_ctx).quota + + +def cf_workspace_connection(cli_ctx, *_): + return cf_machinelearningservices_cl(cli_ctx).workspace_connection + + +def cf_machine_learning_compute(cli_ctx, *_): + return cf_machinelearningservices_cl(cli_ctx).machine_learning_compute + + +def cf_private_endpoint_connection(cli_ctx, *_): + return cf_machinelearningservices_cl(cli_ctx).private_endpoint_connection + + +def cf_private_link_resource(cli_ctx, *_): + return cf_machinelearningservices_cl(cli_ctx).private_link_resource diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_help.py b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py new file mode 100644 index 00000000000..7640345aa3d --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/generated/_help.py @@ -0,0 +1,1002 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines + +from knack.help_files import helps + + +helps['machinelearningservices workspace'] = """ + type: group + short-summary: machinelearningservices workspace +""" + +helps['machinelearningservices workspace list'] = """ + type: command + short-summary: "Lists all the available machine learning workspaces under the specified subscription." 
+ examples: + - name: Get Workspaces by Resource Group + text: |- + az machinelearningservices workspace list --resource-group "workspace-1234" +""" + +helps['machinelearningservices workspace show'] = """ + type: command + short-summary: "Gets the properties of the specified machine learning workspace." + examples: + - name: Get Workspace + text: |- + az machinelearningservices workspace show --resource-group "workspace-1234" --name "testworkspace" +""" + +helps['machinelearningservices workspace create'] = """ + type: command + short-summary: "Creates or updates a workspace with the specified parameters." + parameters: + - name: --sku + short-summary: "The sku of the workspace." + long-summary: | + Usage: --sku name=XX tier=XX + + name: Name of the sku + tier: Tier of the sku like Basic or Enterprise + - name: --shared-private-link-resources + short-summary: "The list of shared private link resources in this workspace." + long-summary: | + Usage: --shared-private-link-resources name=XX private-link-resource-id=XX group-id=XX request-message=XX \ +status=XX + + name: Unique name of the private link. + private-link-resource-id: The resource id that private link links to. + group-id: The private link resource group id. + request-message: Request message. + status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service. + + Multiple actions can be specified by using more than one --shared-private-link-resources argument. + - name: --encryption-key-vault-properties + short-summary: "Customer Key vault properties." + long-summary: | + Usage: --encryption-key-vault-properties key-vault-arm-id=XX key-identifier=XX identity-client-id=XX + + key-vault-arm-id: Required. The ArmId of the keyVault where the customer owned encryption key is present. + key-identifier: Required. Key vault uri to access the encryption key. + identity-client-id: For future use - The client id of the identity which will be used to access key vault. 
+ examples: + - name: Create Workspace + text: |- + az machinelearningservices workspace create --location "eastus2euap" --description "test description" \ +--application-insights "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/mic\ +rosoft.insights/components/testinsights" --container-registry "/subscriptions/00000000-1111-2222-3333-444444444444/reso\ +urceGroups/workspace-1234/providers/Microsoft.ContainerRegistry/registries/testRegistry" \ +--encryption-key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/keys/testkey/aab\ +bccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/wor\ +kspace-1234/providers/Microsoft.KeyVault/vaults/testkv" --encryption-status "Enabled" --friendly-name "HelloName" \ +--hbi-workspace false --key-vault "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/pr\ +oviders/Microsoft.KeyVault/vaults/testkv" --shared-private-link-resources name="testdbresource" \ +private-link-resource-id="/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/workspace-1234/providers/M\ +icrosoft.DocumentDB/databaseAccounts/testdbresource/privateLinkResources/Sql" group-id="Sql" request-message="Please \ +approve" status="Approved" --storage-account "/subscriptions/00000000-1111-2222-3333-444444444444/resourceGroups/accoun\ +tcrud-1234/providers/Microsoft.Storage/storageAccounts/testStorageAccount" --sku name="Basic" tier="Basic" \ +--resource-group "workspace-1234" --name "testworkspace" +""" + +helps['machinelearningservices workspace update'] = """ + type: command + short-summary: "Updates a machine learning workspace with the specified parameters." + parameters: + - name: --sku + short-summary: "The sku of the workspace." + long-summary: | + Usage: --sku name=XX tier=XX + + name: Name of the sku + tier: Tier of the sku like Basic or Enterprise + examples: + - name: Update Workspace + text: |- + az machinelearningservices workspace update --description "new description" --friendly-name "New \ +friendly name" --sku name="Enterprise" tier="Enterprise" --resource-group "workspace-1234" --name "testworkspace" +""" + +helps['machinelearningservices workspace delete'] = """ + type: command + short-summary: "Deletes a machine learning workspace." + examples: + - name: Delete Workspace + text: |- + az machinelearningservices workspace delete --resource-group "workspace-1234" --name "testworkspace" +""" + +helps['machinelearningservices workspace list-key'] = """ + type: command + short-summary: "Lists all the keys associated with this workspace. This includes keys for the storage account, app \ +insights and password for container registry" + examples: + - name: List Workspace Keys + text: |- + az machinelearningservices workspace list-key --resource-group "testrg123" --name "workspaces123" +""" + +helps['machinelearningservices workspace resync-key'] = """ + type: command + short-summary: "Resync all the keys associated with this workspace. This includes keys for the storage account, \ +app insights and password for container registry" + examples: + - name: Resync Workspace Keys + text: |- + az machinelearningservices workspace resync-key --resource-group "testrg123" --name "workspaces123" +""" + +helps['machinelearningservices workspace wait'] = """ + type: command + short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices workspace is met. 
+    examples:
+      - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully created.
+        text: |-
+          az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" --created
+      - name: Pause executing next line of CLI script until the machinelearningservices workspace is successfully deleted.
+        text: |-
+          az machinelearningservices workspace wait --resource-group "workspace-1234" --name "testworkspace" --deleted
+"""
+
+helps['machinelearningservices workspace-feature'] = """
+    type: group
+    short-summary: machinelearningservices workspace-feature
+"""
+
+helps['machinelearningservices workspace-feature list'] = """
+    type: command
+    short-summary: "Lists all enabled features for a workspace"
+    examples:
+      - name: List Workspace features
+        text: |-
+          az machinelearningservices workspace-feature list --resource-group "myResourceGroup" --workspace-name "testworkspace"
+"""
+
+helps['machinelearningservices notebook'] = """
+    type: group
+    short-summary: machinelearningservices notebook
+"""
+
+helps['machinelearningservices notebook prepare'] = """
+    type: command
+    short-summary: "Prepare the notebook resource for the workspace."
+    examples:
+      - name: Prepare Notebook
+        text: |-
+          az machinelearningservices notebook prepare --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices usage'] = """
+    type: group
+    short-summary: machinelearningservices usage
+"""
+
+helps['machinelearningservices usage list'] = """
+    type: command
+    short-summary: "Gets the current usage information as well as limits for AML resources for a given subscription and location."
+    examples:
+      - name: List Usages
+        text: |-
+          az machinelearningservices usage list --location "eastus"
+"""
+
+helps['machinelearningservices virtual-machine-size'] = """
+    type: group
+    short-summary: machinelearningservices virtual-machine-size
+"""
+
+helps['machinelearningservices virtual-machine-size list'] = """
+    type: command
+    short-summary: "Returns supported VM Sizes in a location"
+    examples:
+      - name: List VM Sizes
+        text: |-
+          az machinelearningservices virtual-machine-size list --location "eastus"
+"""
+
+helps['machinelearningservices quota'] = """
+    type: group
+    short-summary: machinelearningservices quota
+"""
+
+helps['machinelearningservices quota list'] = """
+    type: command
+    short-summary: "Gets the currently assigned Workspace Quotas based on VMFamily."
+    examples:
+      - name: List workspace quotas by VMFamily
+        text: |-
+          az machinelearningservices quota list --location "eastus"
+"""
+
+helps['machinelearningservices quota update'] = """
+    type: command
+    short-summary: "Update the quota for each VM family in the workspace."
+    parameters:
+      - name: --value
+        short-summary: "The list for update quota."
+        long-summary: |
+            Usage: --value id=XX type=XX limit=XX unit=XX
+
+            id: Specifies the resource ID.
+            type: Specifies the resource type.
+            limit: The maximum permitted quota of the resource.
+            unit: An enum describing the unit of quota measurement.
+
+            Multiple actions can be specified by using more than one --value argument.
+    examples:
+      - name: Update quotas
+        text: |-
+          az machinelearningservices quota update --location "eastus" --value type="Microsoft.MachineLearningServices/workspaces/dedicatedCores/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace1/quotas/StandardDSv2Family" limit=100 unit="Count" --value type="Microsoft.MachineLearningServices/workspaces/dedicatedCores/quotas" id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/providers/Microsoft.MachineLearningServices/workspaces/demo_workspace2/quotas/StandardDSv2Family" limit=200 unit="Count"
+"""
+
+helps['machinelearningservices workspace-connection'] = """
+    type: group
+    short-summary: machinelearningservices workspace-connection
+"""
+
+helps['machinelearningservices workspace-connection list'] = """
+    type: command
+    short-summary: "List all connections under an AML workspace."
+    examples:
+      - name: ListWorkspaceConnections
+        text: |-
+          az machinelearningservices workspace-connection list --category "ACR" --resource-group "resourceGroup-1" --target "www.facebook.com" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection show'] = """
+    type: command
+    short-summary: "Get the details of a workspace connection."
+    examples:
+      - name: GetWorkspaceConnection
+        text: |-
+          az machinelearningservices workspace-connection show --connection-name "connection-1" --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection create'] = """
+    type: command
+    short-summary: "Add a new workspace connection."
+    examples:
+      - name: CreateWorkspaceConnection
+        text: |-
+          az machinelearningservices workspace-connection create --connection-name "connection-1" --name "connection-1" --auth-type "PAT" --category "ACR" --target "www.facebook.com" --value "secrets" --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices workspace-connection delete'] = """
+    type: command
+    short-summary: "Delete a workspace connection."
+    examples:
+      - name: DeleteWorkspaceConnection
+        text: |-
+          az machinelearningservices workspace-connection delete --connection-name "connection-1" --resource-group "resourceGroup-1" --workspace-name "workspace-1"
+"""
+
+helps['machinelearningservices machine-learning-compute'] = """
+    type: group
+    short-summary: machinelearningservices machine-learning-compute
+"""
+
+helps['machinelearningservices machine-learning-compute list'] = """
+    type: command
+    short-summary: "Gets computes in the specified workspace."
+    examples:
+      - name: Get Computes
+        text: |-
+          az machinelearningservices machine-learning-compute list --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute show'] = """
+    type: command
+    short-summary: "Gets compute definition by its name. Any secrets (storage keys, service credentials, etc.) are not returned - use the 'keys' nested resource to get them."
+    examples:
+      - name: Get an AKS Compute
+        text: |-
+          az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Get an AML Compute
+        text: |-
+          az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Get a ComputeInstance
+        text: |-
+          az machinelearningservices machine-learning-compute show --compute-name "compute123" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute aks'] = """
+    type: group
+    short-summary: machinelearningservices machine-learning-compute sub group aks
+"""
+
+helps['machinelearningservices machine-learning-compute aks create'] = """
+    type: command
+    short-summary: "Creates or updates compute. This call will overwrite a compute if it exists. This is a nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet."
+    parameters:
+      - name: --sku
+        short-summary: "The sku of the workspace."
+        long-summary: |
+            Usage: --sku name=XX tier=XX
+
+            name: Name of the sku
+            tier: Tier of the sku like Basic or Enterprise
+    examples:
+      - name: Create an AKS Compute
+        text: |-
+          az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+        text: |-
+          az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{}}" --location "eastus" --properties-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute
+        text: |-
+          az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" --properties-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\"sshPublicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute with minimal inputs
+        text: |-
+          az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" --properties-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a DataFactory Compute
+        text: |-
+          az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Update an AKS Compute
+        text: |-
+          az machinelearningservices machine-learning-compute aks create --compute-name "compute123" --location "eastus" --description "some compute" --properties-properties "{\\"agentCount\\":4}" --resource-id \
+"/subscriptions/34adfa4f-cedf-4dc0-ba29-b6d1a69ab345/resourcegroups/testrg123/providers/Microsoft.ContainerService/mana\ +gedClusters/compute123-56826-c9b00420020b2" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AML Compute + text: |- + az machinelearningservices machine-learning-compute aks create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --properties-properties "{\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\"\ +:0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"}}" --resource-group "testrg123" --workspace-name "workspaces123" +""" + +helps['machinelearningservices machine-learning-compute aml-compute'] = """ + type: group + short-summary: machinelearningservices machine-learning-compute sub group aml-compute +""" + +helps['machinelearningservices machine-learning-compute aml-compute create'] = """ + type: command + short-summary: "Creates or updates compute. This call will overwrite a compute if it exists. This is a \ +nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify that it does not exist \ +yet." + parameters: + - name: --sku + short-summary: "The sku of the workspace." + long-summary: | + Usage: --sku name=XX tier=XX + + name: Name of the sku + tier: Tier of the sku like Basic or Enterprise + examples: + - name: Create AKS Compute + text: |- + az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a AML Compute + text: |- + az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --properties-properties "{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scal\ +eSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"},\\"vmPriority\\":\ +\\"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute + text: |- + az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \ +--location "eastus" --properties-properties "{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{\\"sshPu\ +blicAccess\\":\\"Disabled\\"},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}" \ +--resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute with minimal inputs + text: |- + az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \ +--location "eastus" --properties-properties "{\\"vmSize\\":\\"STANDARD_NC6\\"}" --resource-group "testrg123" \ +--workspace-name "workspaces123" + - name: Create a DataFactory Compute + text: |- + az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name 
"workspaces123" + - name: Update a AKS Compute + text: |- + az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \ +--location "eastus" --description "some compute" --properties-properties "{\\"agentCount\\":4}" --resource-id \ +"/subscriptions/34adfa4f-cedf-4dc0-ba29-b6d1a69ab345/resourcegroups/testrg123/providers/Microsoft.ContainerService/mana\ +gedClusters/compute123-56826-c9b00420020b2" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AML Compute + text: |- + az machinelearningservices machine-learning-compute aml-compute create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --properties-properties "{\\"scaleSettings\\":{\\"maxNodeCount\\":1,\\"minNodeCount\\"\ +:0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"}}" --resource-group "testrg123" --workspace-name "workspaces123" +""" + +helps['machinelearningservices machine-learning-compute compute-instance'] = """ + type: group + short-summary: machinelearningservices machine-learning-compute sub group compute-instance +""" + +helps['machinelearningservices machine-learning-compute compute-instance create'] = """ + type: command + short-summary: "Creates or updates compute. This call will overwrite a compute if it exists. This is a \ +nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify that it does not exist \ +yet." + parameters: + - name: --sku + short-summary: "The sku of the workspace." + long-summary: | + Usage: --sku name=XX tier=XX + + name: Name of the sku + tier: Tier of the sku like Basic or Enterprise + - name: --ssh-settings + short-summary: "Specifies policy and settings for SSH access." + long-summary: | + Usage: --ssh-settings ssh-public-access=XX admin-public-key=XX + + ssh-public-access: State of the public SSH port. Possible values are: Disabled - Indicates that the public \ +ssh port is closed on this instance. Enabled - Indicates that the public ssh port is open and accessible according to \ +the VNet/subnet policy if applicable. + admin-public-key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t rsa -b 2048" to \ +generate your SSH key pairs. 
+ examples: + - name: Create AKS Compute + text: |- + az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a AML Compute + text: |- + az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute + text: |- + az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute with minimal inputs + text: |- + az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a DataFactory Compute + text: |- + az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AKS Compute + text: |- + az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \ +--location "eastus" --description "some compute" --resource-id "/subscriptions/34adfa4f-cedf-4dc0-ba29-b6d1a69ab345/res\ +ourcegroups/testrg123/providers/Microsoft.ContainerService/managedClusters/compute123-56826-c9b00420020b2" \ +--resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AML Compute + text: |- + az machinelearningservices machine-learning-compute compute-instance create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" +""" + +helps['machinelearningservices machine-learning-compute data-factory'] = """ + type: group + short-summary: machinelearningservices machine-learning-compute sub group data-factory +""" + +helps['machinelearningservices machine-learning-compute data-factory create'] = """ + type: command + short-summary: "Creates or updates compute. This call will overwrite a compute if it exists. This is a \ +nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify that it does not exist \ +yet." + parameters: + - name: --sku + short-summary: "The sku of the workspace." 
+ long-summary: | + Usage: --sku name=XX tier=XX + + name: Name of the sku + tier: Tier of the sku like Basic or Enterprise + examples: + - name: Create AKS Compute + text: |- + az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a AML Compute + text: |- + az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute + text: |- + az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute with minimal inputs + text: |- + az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a DataFactory Compute + text: |- + az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AKS Compute + text: |- + az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \ +--location "eastus" --description "some compute" --resource-id "/subscriptions/34adfa4f-cedf-4dc0-ba29-b6d1a69ab345/res\ +ourcegroups/testrg123/providers/Microsoft.ContainerService/managedClusters/compute123-56826-c9b00420020b2" \ +--resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AML Compute + text: |- + az machinelearningservices machine-learning-compute data-factory create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" +""" + +helps['machinelearningservices machine-learning-compute data-lake-analytics'] = """ + type: group + short-summary: machinelearningservices machine-learning-compute sub group data-lake-analytics +""" + +helps['machinelearningservices machine-learning-compute data-lake-analytics create'] = """ + type: command + short-summary: "Creates or updates compute. This call will overwrite a compute if it exists. This is a \ +nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify that it does not exist \ +yet." + parameters: + - name: --sku + short-summary: "The sku of the workspace." 
+ long-summary: | + Usage: --sku name=XX tier=XX + + name: Name of the sku + tier: Tier of the sku like Basic or Enterprise + examples: + - name: Create AKS Compute + text: |- + az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \ +"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a AML Compute + text: |- + az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \ +"compute123" --identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities \ +"{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIden\ +tity/userAssignedIdentities/identity-name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name \ +"workspaces123" + - name: Create a ComputeInstance Compute + text: |- + az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \ +"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute with minimal inputs + text: |- + az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \ +"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a DataFactory Compute + text: |- + az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \ +"compute123" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AKS Compute + text: |- + az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \ +"compute123" --location "eastus" --description "some compute" --resource-id "/subscriptions/34adfa4f-cedf-4dc0-ba29-b6d\ +1a69ab345/resourcegroups/testrg123/providers/Microsoft.ContainerService/managedClusters/compute123-56826-c9b00420020b2"\ + --resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AML Compute + text: |- + az machinelearningservices machine-learning-compute data-lake-analytics create --compute-name \ +"compute123" --identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities \ +"{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIden\ +tity/userAssignedIdentities/identity-name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name \ +"workspaces123" +""" + +helps['machinelearningservices machine-learning-compute databricks'] = """ + type: group + short-summary: machinelearningservices machine-learning-compute sub group databricks +""" + +helps['machinelearningservices machine-learning-compute databricks create'] = """ + type: command + short-summary: "Creates or updates compute. This call will overwrite a compute if it exists. This is a \ +nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify that it does not exist \ +yet." + parameters: + - name: --sku + short-summary: "The sku of the workspace." 
+ long-summary: | + Usage: --sku name=XX tier=XX + + name: Name of the sku + tier: Tier of the sku like Basic or Enterprise + examples: + - name: Create AKS Compute + text: |- + az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a AML Compute + text: |- + az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute + text: |- + az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute with minimal inputs + text: |- + az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a DataFactory Compute + text: |- + az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AKS Compute + text: |- + az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \ +--location "eastus" --description "some compute" --resource-id "/subscriptions/34adfa4f-cedf-4dc0-ba29-b6d1a69ab345/res\ +ourcegroups/testrg123/providers/Microsoft.ContainerService/managedClusters/compute123-56826-c9b00420020b2" \ +--resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AML Compute + text: |- + az machinelearningservices machine-learning-compute databricks create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" +""" + +helps['machinelearningservices machine-learning-compute hd-insight'] = """ + type: group + short-summary: machinelearningservices machine-learning-compute sub group hd-insight +""" + +helps['machinelearningservices machine-learning-compute hd-insight create'] = """ + type: command + short-summary: "Creates or updates compute. This call will overwrite a compute if it exists. This is a \ +nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify that it does not exist \ +yet." + parameters: + - name: --sku + short-summary: "The sku of the workspace." 
+ long-summary: | + Usage: --sku name=XX tier=XX + + name: Name of the sku + tier: Tier of the sku like Basic or Enterprise + - name: --administrator-account + short-summary: "Admin credentials for master node of the cluster" + long-summary: | + Usage: --administrator-account username=XX password=XX public-key-data=XX private-key-data=XX + + username: Username of admin account + password: Password of admin account + public-key-data: Public key data + private-key-data: Private key data + examples: + - name: Create AKS Compute + text: |- + az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a AML Compute + text: |- + az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute + text: |- + az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a ComputeInstance Compute with minimal inputs + text: |- + az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Create a DataFactory Compute + text: |- + az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \ +--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AKS Compute + text: |- + az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \ +--location "eastus" --description "some compute" --resource-id "/subscriptions/34adfa4f-cedf-4dc0-ba29-b6d1a69ab345/res\ +ourcegroups/testrg123/providers/Microsoft.ContainerService/managedClusters/compute123-56826-c9b00420020b2" \ +--resource-group "testrg123" --workspace-name "workspaces123" + - name: Update a AML Compute + text: |- + az machinelearningservices machine-learning-compute hd-insight create --compute-name "compute123" \ +--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\ +0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\ +name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123" +""" + +helps['machinelearningservices machine-learning-compute virtual-machine'] = """ + type: group + short-summary: machinelearningservices machine-learning-compute sub group virtual-machine +""" + +helps['machinelearningservices machine-learning-compute virtual-machine create'] = """ + type: command + short-summary: "Creates or updates compute. This call will overwrite a compute if it exists. This is a \ +nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify that it does not exist \ +yet." + parameters: + - name: --sku + short-summary: "The sku of the workspace." 
+        long-summary: |
+            Usage: --sku name=XX tier=XX
+
+            name: Name of the sku
+            tier: Tier of the sku like Basic or Enterprise
+      - name: --administrator-account
+        short-summary: "Admin credentials for the virtual machine"
+        long-summary: |
+            Usage: --administrator-account username=XX password=XX public-key-data=XX private-key-data=XX
+
+            username: Username of admin account
+            password: Password of admin account
+            public-key-data: Public key data
+            private-key-data: Private key data
+    examples:
+      - name: Create AKS Compute
+        text: |-
+               az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create an AML Compute
+        text: |-
+               az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\
+0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\
+name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute
+        text: |-
+               az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a ComputeInstance Compute with minimal inputs
+        text: |-
+               az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Create a DataFactory Compute
+        text: |-
+               az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Update an AKS Compute
+        text: |-
+               az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--location "eastus" --description "some compute" --resource-id "/subscriptions/34adfa4f-cedf-4dc0-ba29-b6d1a69ab345/res\
+ourcegroups/testrg123/providers/Microsoft.ContainerService/managedClusters/compute123-56826-c9b00420020b2" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+      - name: Update an AML Compute
+        text: |-
+               az machinelearningservices machine-learning-compute virtual-machine create --compute-name "compute123" \
+--identity-type "SystemAssigned,UserAssigned" --identity-user-assigned-identities "{\\"/subscriptions/00000000-0000-000\
+0-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-\
+name\\":{}}" --location "eastus" --resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute update'] = """
+    type: command
+    short-summary: "Updates properties of a compute. This call will overwrite a compute if it exists. This is a \
+nonrecoverable operation."
+    parameters:
+      - name: --scale-settings
+        short-summary: "Desired scale settings for the amlCompute."
+        long-summary: |
+            Usage: --scale-settings max-node-count=XX min-node-count=XX node-idle-time-before-scale-down=XX
+
+            max-node-count: Required. Max number of nodes to use
+            min-node-count: Min number of nodes to use
+            node-idle-time-before-scale-down: Node Idle Time before scaling down amlCompute
+    examples:
+      - name: Update an AmlCompute Compute
+        text: |-
+               az machinelearningservices machine-learning-compute update --compute-name "compute123" --scale-settings \
+max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" --resource-group "testrg123" \
+--workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute delete'] = """
+    type: command
+    short-summary: "Deletes the specified Machine Learning compute."
+    examples:
+      - name: Delete Compute
+        text: |-
+               az machinelearningservices machine-learning-compute delete --compute-name "compute123" --resource-group \
+"testrg123" --underlying-resource-action "Delete" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute list-key'] = """
+    type: command
+    short-summary: "Gets secrets related to Machine Learning compute (storage keys, service credentials, etc.)."
+    examples:
+      - name: List AKS Compute Keys
+        text: |-
+               az machinelearningservices machine-learning-compute list-key --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute list-node'] = """
+    type: command
+    short-summary: "Gets the details (e.g. IP address, port, etc.) of all the compute nodes in the compute."
+    examples:
+      - name: Get compute nodes information for a compute
+        text: |-
+               az machinelearningservices machine-learning-compute list-node --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute restart'] = """
+    type: command
+    short-summary: "Posts a restart action to a compute instance."
+    examples:
+      - name: Restart ComputeInstance Compute
+        text: |-
+               az machinelearningservices machine-learning-compute restart --compute-name "compute123" \
+--resource-group "testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute start'] = """
+    type: command
+    short-summary: "Posts a start action to a compute instance."
+    examples:
+      - name: Start ComputeInstance Compute
+        text: |-
+               az machinelearningservices machine-learning-compute start --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute stop'] = """
+    type: command
+    short-summary: "Posts a stop action to a compute instance."
+    examples:
+      - name: Stop ComputeInstance Compute
+        text: |-
+               az machinelearningservices machine-learning-compute stop --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123"
+"""
+
+helps['machinelearningservices machine-learning-compute wait'] = """
+    type: command
+    short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices \
+machine-learning-compute is met.
+    examples:
+      - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully created.
+        text: |-
+               az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \
+"testrg123" --workspace-name "workspaces123" --created
+      - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \
+successfully updated.
+ text: |- + az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \ +"testrg123" --workspace-name "workspaces123" --updated + - name: Pause executing next line of CLI script until the machinelearningservices machine-learning-compute is \ +successfully deleted. + text: |- + az machinelearningservices machine-learning-compute wait --compute-name "compute123" --resource-group \ +"testrg123" --workspace-name "workspaces123" --deleted +""" + +helps['machinelearningservices '] = """ + type: group + short-summary: machinelearningservices +""" + +helps['machinelearningservices list-sku'] = """ + type: command + short-summary: "Lists all skus with associated features" + examples: + - name: List Skus + text: |- + az machinelearningservices list-sku +""" + +helps['machinelearningservices private-endpoint-connection'] = """ + type: group + short-summary: machinelearningservices private-endpoint-connection +""" + +helps['machinelearningservices private-endpoint-connection show'] = """ + type: command + short-summary: "Gets the specified private endpoint connection associated with the workspace." + examples: + - name: WorkspaceGetPrivateEndpointConnection + text: |- + az machinelearningservices private-endpoint-connection show --name "{privateEndpointConnectionName}" \ +--resource-group "rg-1234" --workspace-name "testworkspace" +""" + +helps['machinelearningservices private-endpoint-connection delete'] = """ + type: command + short-summary: "Deletes the specified private endpoint connection associated with the workspace." + examples: + - name: WorkspaceDeletePrivateEndpointConnection + text: |- + az machinelearningservices private-endpoint-connection delete --name "{privateEndpointConnectionName}" \ +--resource-group "rg-1234" --workspace-name "testworkspace" +""" + +helps['machinelearningservices private-endpoint-connection put'] = """ + type: command + short-summary: "Update the state of specified private endpoint connection associated with the workspace." + parameters: + - name: --private-link-service-connection-state + short-summary: "A collection of information about the state of the connection between service consumer and \ +provider." + long-summary: | + Usage: --private-link-service-connection-state status=XX description=XX actions-required=XX + + status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service. + description: The reason for approval/rejection of the connection. + actions-required: A message indicating if changes on the service provider require any updates on the \ +consumer. + examples: + - name: WorkspacePutPrivateEndpointConnection + text: |- + az machinelearningservices private-endpoint-connection put --name "{privateEndpointConnectionName}" \ +--private-link-service-connection-state description="Auto-Approved" status="Approved" --resource-group "rg-1234" \ +--workspace-name "testworkspace" +""" + +helps['machinelearningservices private-endpoint-connection wait'] = """ + type: command + short-summary: Place the CLI in a waiting state until a condition of the machinelearningservices \ +private-endpoint-connection is met. + examples: + - name: Pause executing next line of CLI script until the machinelearningservices private-endpoint-connection is \ +successfully deleted. 
+ text: |- + az machinelearningservices private-endpoint-connection wait --name "{privateEndpointConnectionName}" \ +--resource-group "rg-1234" --workspace-name "testworkspace" --deleted +""" + +helps['machinelearningservices private-link-resource'] = """ + type: group + short-summary: machinelearningservices private-link-resource +""" + +helps['machinelearningservices private-link-resource list'] = """ + type: command + short-summary: "Gets the private link resources that need to be created for a workspace." + examples: + - name: WorkspaceListPrivateLinkResources + text: |- + az machinelearningservices private-link-resource list --resource-group "rg-1234" --workspace-name \ +"testworkspace" +""" diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_params.py b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py new file mode 100644 index 00000000000..0a71a29bb62 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/generated/_params.py @@ -0,0 +1,433 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-lines +# pylint: disable=too-many-statements + +from azure.cli.core.commands.parameters import ( + tags_type, + get_three_state_flag, + get_enum_type, + resource_group_name_type, + get_location_type +) +from azure.cli.core.commands.validators import ( + get_default_location_from_resource_group, + validate_file_or_dict +) +from azext_machinelearningservices.action import ( + AddSku, + AddSharedPrivateLinkResources, + AddEncryptionKeyVaultProperties, + AddValue, + AddSshSettings, + AddAdministratorAccount, + AddScaleSettings, + AddPrivateLinkServiceConnectionState +) + + +def load_arguments(self, _): + + with self.argument_context('machinelearningservices workspace list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('skiptoken', type=str, help='Continuation token for pagination.') + + with self.argument_context('machinelearningservices workspace show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure ' + 'Machine Learning workspace.', id_part='name') + + with self.argument_context('machinelearningservices workspace create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure ' + 'Machine Learning workspace.') + c.argument('location', arg_type=get_location_type(self.cli_ctx), + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='*', help='The sku of the workspace.') + c.argument('identity_type', arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', '' + 'SystemAssigned,UserAssigned', 'None']), help='The ' + 'identity type.') + c.argument('identity_user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with resource. 
The user identity dictionary key references will be ARM resource ids in '
+                   'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.'
+                   'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.')
+        c.argument('description', type=str, help='The description of this workspace.')
+        c.argument('friendly_name', type=str, help='The friendly name for this workspace. This name is mutable')
+        c.argument('key_vault', type=str, help='ARM id of the key vault associated with this workspace. This cannot be '
+                   'changed once the workspace has been created')
+        c.argument('application_insights', type=str, help='ARM id of the application insights associated with this '
+                   'workspace. This cannot be changed once the workspace has been created')
+        c.argument('container_registry', type=str, help='ARM id of the container registry associated with this '
+                   'workspace. This cannot be changed once the workspace has been created')
+        c.argument('storage_account', type=str, help='ARM id of the storage account associated with this workspace. '
+                   'This cannot be changed once the workspace has been created')
+        c.argument('discovery_url', type=str, help='URL for the discovery service to identify regional endpoints for '
+                   'machine learning experimentation services')
+        c.argument('hbi_workspace', arg_type=get_three_state_flag(), help='The flag to signal HBI data in the '
+                   'workspace and reduce diagnostic data collected by the service')
+        c.argument('image_build_compute', type=str, help='The compute name for image build')
+        c.argument('allow_public_access_when_behind_vnet', arg_type=get_three_state_flag(), help='The flag to indicate '
+                   'whether to allow public access when behind VNet.')
+        c.argument('shared_private_link_resources', action=AddSharedPrivateLinkResources, nargs='*', help='The list of '
+                   'shared private link resources in this workspace.')
+        c.argument('encryption_status', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Indicates whether or '
+                   'not the encryption is enabled for the workspace.')
+        c.argument('encryption_key_vault_properties', action=AddEncryptionKeyVaultProperties, nargs='*', help=''
+                   'Customer Key vault properties.')
+
+    with self.argument_context('machinelearningservices workspace update') as c:
+        c.argument('resource_group_name', resource_group_name_type)
+        c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+                   'Machine Learning workspace.', id_part='name')
+        c.argument('tags', tags_type)
+        c.argument('sku', action=AddSku, nargs='*', help='The sku of the workspace.')
+        c.argument('description', type=str, help='The description of this workspace.')
+        c.argument('friendly_name', type=str, help='The friendly name for this workspace.')
+
+    with self.argument_context('machinelearningservices workspace delete') as c:
+        c.argument('resource_group_name', resource_group_name_type)
+        c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+                   'Machine Learning workspace.', id_part='name')
+
+    with self.argument_context('machinelearningservices workspace list-key') as c:
+        c.argument('resource_group_name', resource_group_name_type)
+        c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure '
+                   'Machine Learning workspace.')
+
+    with self.argument_context('machinelearningservices workspace resync-key') as c:
+        c.argument('resource_group_name',
resource_group_name_type) + c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure ' + 'Machine Learning workspace.', id_part='name') + + with self.argument_context('machinelearningservices workspace wait') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', options_list=['--name', '-n', '--workspace-name'], type=str, help='Name of Azure ' + 'Machine Learning workspace.', id_part='name') + + with self.argument_context('machinelearningservices workspace-feature list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + + with self.argument_context('machinelearningservices notebook prepare') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + + with self.argument_context('machinelearningservices usage list') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx)) + + with self.argument_context('machinelearningservices virtual-machine-size list') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx)) + + with self.argument_context('machinelearningservices quota list') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx)) + + with self.argument_context('machinelearningservices quota update') as c: + c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') + c.argument('value', action=AddValue, nargs='*', help='The list for update quota.') + + with self.argument_context('machinelearningservices workspace-connection list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('target', type=str, help='Target of the workspace connection.') + c.argument('category', type=str, help='Category of the workspace connection.') + + with self.argument_context('machinelearningservices workspace-connection show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('connection_name', type=str, help='Friendly name of the workspace connection', id_part='' + 'child_name_1') + + with self.argument_context('machinelearningservices workspace-connection create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('connection_name', type=str, help='Friendly name of the workspace connection') + c.argument('name', type=str, help='Friendly name of the workspace connection') + c.argument('category', type=str, help='Category of the workspace connection.') + c.argument('target', type=str, help='Target of the workspace connection.') + c.argument('auth_type', type=str, help='Authorization type of the workspace connection.') + c.argument('value', type=str, help='Value details of the workspace connection.') + + with self.argument_context('machinelearningservices workspace-connection delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('connection_name', type=str, help='Friendly name of the workspace connection', 
id_part='' + 'child_name_1') + + with self.argument_context('machinelearningservices machine-learning-compute list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('skiptoken', type=str, help='Continuation token for pagination.') + + with self.argument_context('machinelearningservices machine-learning-compute show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.', + id_part='child_name_1') + + with self.argument_context('machinelearningservices machine-learning-compute aks create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.') + c.argument('location', arg_type=get_location_type(self.cli_ctx), + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='*', help='The sku of the workspace.') + c.argument('identity_type', arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', '' + 'SystemAssigned,UserAssigned', 'None']), help='The ' + 'identity type.') + c.argument('identity_user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with resource. The user identity dictionary key references will be ARM resource ids in ' + 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.' + 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.') + c.argument('compute_location', type=str, help='Location for the underlying compute') + c.argument('description', type=str, help='The description of the Machine Learning compute.') + c.argument('resource_id', type=str, help='ARM resource id of the underlying compute') + c.argument('properties_properties', type=validate_file_or_dict, help='AKS properties Expected value: ' + 'json-string/@json-file.') + + with self.argument_context('machinelearningservices machine-learning-compute aml-compute create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.') + c.argument('location', arg_type=get_location_type(self.cli_ctx), + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='*', help='The sku of the workspace.') + c.argument('identity_type', arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', '' + 'SystemAssigned,UserAssigned', 'None']), help='The ' + 'identity type.') + c.argument('identity_user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with resource. The user identity dictionary key references will be ARM resource ids in ' + 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.' + 'ManagedIdentity/userAssignedIdentities/{identityName}\'. 
Expected value: json-string/@json-file.') + c.argument('compute_location', type=str, help='Location for the underlying compute') + c.argument('description', type=str, help='The description of the Machine Learning compute.') + c.argument('resource_id', type=str, help='ARM resource id of the underlying compute') + c.argument('properties_properties', type=validate_file_or_dict, help='AML Compute properties Expected value: ' + 'json-string/@json-file.') + + with self.argument_context('machinelearningservices machine-learning-compute compute-instance create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.') + c.argument('location', arg_type=get_location_type(self.cli_ctx), + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='*', help='The sku of the workspace.') + c.argument('identity_type', arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', '' + 'SystemAssigned,UserAssigned', 'None']), help='The ' + 'identity type.') + c.argument('identity_user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with resource. The user identity dictionary key references will be ARM resource ids in ' + 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.' + 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.') + c.argument('compute_location', type=str, help='Location for the underlying compute') + c.argument('description', type=str, help='The description of the Machine Learning compute.') + c.argument('resource_id', type=str, help='ARM resource id of the underlying compute') + c.argument('vm_size', type=str, help='Virtual Machine Size') + c.argument('application_sharing_policy', arg_type=get_enum_type(['Personal', 'Shared']), help='Policy for ' + 'sharing applications on this compute instance among users of parent workspace. If Personal, only ' + 'the creator can access applications on this compute instance. When Shared, any workspace user can ' + 'access applications on this instance depending on his/her assigned role.') + c.argument('ssh_settings', action=AddSshSettings, nargs='*', help='Specifies policy and settings for SSH ' + 'access.') + c.argument('subnet_id', type=str, help='The ID of the resource') + + with self.argument_context('machinelearningservices machine-learning-compute data-factory create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.') + c.argument('location', arg_type=get_location_type(self.cli_ctx), + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='*', help='The sku of the workspace.') + c.argument('identity_type', arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', '' + 'SystemAssigned,UserAssigned', 'None']), help='The ' + 'identity type.') + c.argument('identity_user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with resource. 
The user identity dictionary key references will be ARM resource ids in ' + 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.' + 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.') + c.argument('compute_location', type=str, help='Location for the underlying compute') + c.argument('description', type=str, help='The description of the Machine Learning compute.') + c.argument('resource_id', type=str, help='ARM resource id of the underlying compute') + + with self.argument_context('machinelearningservices machine-learning-compute data-lake-analytics create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.') + c.argument('location', arg_type=get_location_type(self.cli_ctx), + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='*', help='The sku of the workspace.') + c.argument('identity_type', arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', '' + 'SystemAssigned,UserAssigned', 'None']), help='The ' + 'identity type.') + c.argument('identity_user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with resource. The user identity dictionary key references will be ARM resource ids in ' + 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.' + 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.') + c.argument('compute_location', type=str, help='Location for the underlying compute') + c.argument('description', type=str, help='The description of the Machine Learning compute.') + c.argument('resource_id', type=str, help='ARM resource id of the underlying compute') + c.argument('data_lake_store_account_name', type=str, help='DataLake Store Account Name') + + with self.argument_context('machinelearningservices machine-learning-compute databricks create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.') + c.argument('location', arg_type=get_location_type(self.cli_ctx), + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='*', help='The sku of the workspace.') + c.argument('identity_type', arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', '' + 'SystemAssigned,UserAssigned', 'None']), help='The ' + 'identity type.') + c.argument('identity_user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with resource. The user identity dictionary key references will be ARM resource ids in ' + 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.' + 'ManagedIdentity/userAssignedIdentities/{identityName}\'. 
Expected value: json-string/@json-file.') + c.argument('compute_location', type=str, help='Location for the underlying compute') + c.argument('description', type=str, help='The description of the Machine Learning compute.') + c.argument('resource_id', type=str, help='ARM resource id of the underlying compute') + c.argument('databricks_access_token', type=str, help='Databricks access token') + + with self.argument_context('machinelearningservices machine-learning-compute hd-insight create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.') + c.argument('location', arg_type=get_location_type(self.cli_ctx), + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='*', help='The sku of the workspace.') + c.argument('identity_type', arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', '' + 'SystemAssigned,UserAssigned', 'None']), help='The ' + 'identity type.') + c.argument('identity_user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with resource. The user identity dictionary key references will be ARM resource ids in ' + 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.' + 'ManagedIdentity/userAssignedIdentities/{identityName}\'. Expected value: json-string/@json-file.') + c.argument('compute_location', type=str, help='Location for the underlying compute') + c.argument('description', type=str, help='The description of the Machine Learning compute.') + c.argument('resource_id', type=str, help='ARM resource id of the underlying compute') + c.argument('ssh_port', type=int, help='Port open for ssh connections on the master node of the cluster.') + c.argument('address', type=str, help='Public IP address of the master node of the cluster.') + c.argument('administrator_account', action=AddAdministratorAccount, nargs='*', help='Admin credentials for ' + 'master node of the cluster') + + with self.argument_context('machinelearningservices machine-learning-compute virtual-machine create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.') + c.argument('location', arg_type=get_location_type(self.cli_ctx), + validator=get_default_location_from_resource_group) + c.argument('tags', tags_type) + c.argument('sku', action=AddSku, nargs='*', help='The sku of the workspace.') + c.argument('identity_type', arg_type=get_enum_type(['SystemAssigned', 'UserAssigned', '' + 'SystemAssigned,UserAssigned', 'None']), help='The ' + 'identity type.') + c.argument('identity_user_assigned_identities', type=validate_file_or_dict, help='The list of user identities ' + 'associated with resource. The user identity dictionary key references will be ARM resource ids in ' + 'the form: \'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.' + 'ManagedIdentity/userAssignedIdentities/{identityName}\'. 
Expected value: json-string/@json-file.') + c.argument('compute_location', type=str, help='Location for the underlying compute') + c.argument('description', type=str, help='The description of the Machine Learning compute.') + c.argument('resource_id', type=str, help='ARM resource id of the underlying compute') + c.argument('virtual_machine_size', type=str, help='Virtual Machine size') + c.argument('ssh_port', type=int, help='Port open for ssh connections.') + c.argument('address', type=str, help='Public IP address of the virtual machine.') + c.argument('administrator_account', action=AddAdministratorAccount, nargs='*', help='Admin credentials for ' + 'virtual machine') + + with self.argument_context('machinelearningservices machine-learning-compute update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.', + id_part='child_name_1') + c.argument('scale_settings', action=AddScaleSettings, nargs='*', help='Desired scale settings for the ' + 'amlCompute.') + + with self.argument_context('machinelearningservices machine-learning-compute delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.', + id_part='child_name_1') + c.argument('underlying_resource_action', arg_type=get_enum_type(['Delete', 'Detach']), help='Delete the ' + 'underlying compute if \'Delete\', or detach the underlying compute from workspace if \'Detach\'.') + + with self.argument_context('machinelearningservices machine-learning-compute list-key') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.') + + with self.argument_context('machinelearningservices machine-learning-compute list-node') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.') + + with self.argument_context('machinelearningservices machine-learning-compute restart') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.', + id_part='child_name_1') + + with self.argument_context('machinelearningservices machine-learning-compute start') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.', + id_part='child_name_1') + + with self.argument_context('machinelearningservices machine-learning-compute stop') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('compute_name', type=str, help='Name of the Azure 
Machine Learning compute.', + id_part='child_name_1') + + with self.argument_context('machinelearningservices machine-learning-compute wait') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('compute_name', type=str, help='Name of the Azure Machine Learning compute.', + id_part='child_name_1') + + with self.argument_context('machinelearningservices private-endpoint-connection show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The name of the private ' + 'endpoint connection associated with the workspace', id_part='child_name_1') + + with self.argument_context('machinelearningservices private-endpoint-connection delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The name of the private ' + 'endpoint connection associated with the workspace', id_part='child_name_1') + + with self.argument_context('machinelearningservices private-endpoint-connection put') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The name of the private ' + 'endpoint connection associated with the workspace', id_part='child_name_1') + c.argument('private_link_service_connection_state', action=AddPrivateLinkServiceConnectionState, nargs='*', + help='A collection of information about the state of the connection between service consumer and ' + 'provider.') + + with self.argument_context('machinelearningservices private-endpoint-connection wait') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.', id_part='name') + c.argument('private_endpoint_connection_name', options_list=['--name', '-n', '--private-endpoint-connection-nam' + 'e'], type=str, help='The name of the private ' + 'endpoint connection associated with the workspace', id_part='child_name_1') + + with self.argument_context('machinelearningservices private-link-resource list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('workspace_name', type=str, help='Name of Azure Machine Learning workspace.') diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py new file mode 100644 index 00000000000..b33a44c1ebf --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/generated/_validators.py @@ -0,0 +1,9 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/action.py b/src/machinelearningservices/azext_machinelearningservices/generated/action.py new file mode 100644 index 00000000000..75d9afbab6f --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/generated/action.py @@ -0,0 +1,228 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access + +import argparse +from collections import defaultdict +from knack.util import CLIError + + +class AddSku(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.sku = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'name': + d['name'] = v[0] + elif kl == 'tier': + d['tier'] = v[0] + return d + + +class AddSharedPrivateLinkResources(argparse._AppendAction): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + super(AddSharedPrivateLinkResources, self).__call__(parser, namespace, action, option_string) + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'name': + d['name'] = v[0] + elif kl == 'private-link-resource-id': + d['private_link_resource_id'] = v[0] + elif kl == 'group-id': + d['group_id'] = v[0] + elif kl == 'request-message': + d['request_message'] = v[0] + elif kl == 'status': + d['status'] = v[0] + return d + + +class AddEncryptionKeyVaultProperties(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.encryption_key_vault_properties = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'key-vault-arm-id': + d['key_vault_arm_id'] = v[0] + elif kl == 'key-identifier': + d['key_identifier'] = v[0] + elif kl == 
'identity-client-id': + d['identity_client_id'] = v[0] + return d + + +class AddValue(argparse._AppendAction): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + super(AddValue, self).__call__(parser, namespace, action, option_string) + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'id': + d['id'] = v[0] + elif kl == 'type': + d['type'] = v[0] + elif kl == 'limit': + d['limit'] = v[0] + elif kl == 'unit': + d['unit'] = v[0] + return d + + +class AddSshSettings(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.ssh_settings = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + d['ssh_public_access'] = "Disabled" + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'ssh-public-access': + d['ssh_public_access'] = v[0] + elif kl == 'admin-public-key': + d['admin_public_key'] = v[0] + return d + + +class AddAdministratorAccount(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.administrator_account = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'username': + d['username'] = v[0] + elif kl == 'password': + d['password'] = v[0] + elif kl == 'public-key-data': + d['public_key_data'] = v[0] + elif kl == 'private-key-data': + d['private_key_data'] = v[0] + return d + + +class AddScaleSettings(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.scale_settings = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + d['min_node_count'] = 0 + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'max-node-count': + d['max_node_count'] = v[0] + elif kl == 'min-node-count': + d['min_node_count'] = v[0] + elif kl == 'node-idle-time-before-scale-down': + d['node_idle_time_before_scale_down'] = v[0] + return d + + +class AddPrivateLinkServiceConnectionState(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.private_link_service_connection_state = 
action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'status': + d['status'] = v[0] + elif kl == 'description': + d['description'] = v[0] + elif kl == 'actions-required': + d['actions_required'] = v[0] + return d diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/commands.py b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py new file mode 100644 index 00000000000..d7565574a23 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/generated/commands.py @@ -0,0 +1,162 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=too-many-statements +# pylint: disable=too-many-locals + +from azure.cli.core.commands import CliCommandType + + +def load_command_table(self, _): + + from azext_machinelearningservices.generated._client_factory import cf_workspace + machinelearningservices_workspace = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_oper' + 'ations#WorkspaceOperations.{}', + client_factory=cf_workspace) + with self.command_group('machinelearningservices workspace', machinelearningservices_workspace, + client_factory=cf_workspace, is_experimental=True) as g: + g.custom_command('list', 'machinelearningservices_workspace_list') + g.custom_show_command('show', 'machinelearningservices_workspace_show') + g.custom_command('create', 'machinelearningservices_workspace_create', supports_no_wait=True) + g.custom_command('update', 'machinelearningservices_workspace_update') + g.custom_command('delete', 'machinelearningservices_workspace_delete', supports_no_wait=True, + confirmation=True) + g.custom_command('list-key', 'machinelearningservices_workspace_list_key') + g.custom_command('resync-key', 'machinelearningservices_workspace_resync_key') + g.custom_wait_command('wait', 'machinelearningservices_workspace_show') + + from azext_machinelearningservices.generated._client_factory import cf_workspace_feature + machinelearningservices_workspace_feature = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_feat' + 'ure_operations#WorkspaceFeatureOperations.{}', + client_factory=cf_workspace_feature) + with self.command_group('machinelearningservices workspace-feature', machinelearningservices_workspace_feature, + client_factory=cf_workspace_feature, is_experimental=True) as g: + g.custom_command('list', 'machinelearningservices_workspace_feature_list') + + from azext_machinelearningservices.generated._client_factory import cf_notebook + machinelearningservices_notebook = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._notebook_opera' 
+ 'tions#NotebookOperations.{}', + client_factory=cf_notebook) + with self.command_group('machinelearningservices notebook', machinelearningservices_notebook, + client_factory=cf_notebook, is_experimental=True) as g: + g.custom_command('prepare', 'machinelearningservices_notebook_prepare') + + from azext_machinelearningservices.generated._client_factory import cf_usage + machinelearningservices_usage = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._usage_operatio' + 'ns#UsageOperations.{}', + client_factory=cf_usage) + with self.command_group('machinelearningservices usage', machinelearningservices_usage, client_factory=cf_usage, + is_experimental=True) as g: + g.custom_command('list', 'machinelearningservices_usage_list') + + from azext_machinelearningservices.generated._client_factory import cf_virtual_machine_size + machinelearningservices_virtual_machine_size = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._virtual_machin' + 'e_size_operations#VirtualMachineSizeOperations.{}', + client_factory=cf_virtual_machine_size) + with self.command_group('machinelearningservices virtual-machine-size', + machinelearningservices_virtual_machine_size, client_factory=cf_virtual_machine_size, + is_experimental=True) as g: + g.custom_command('list', 'machinelearningservices_virtual_machine_size_list') + + from azext_machinelearningservices.generated._client_factory import cf_quota + machinelearningservices_quota = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._quota_operatio' + 'ns#QuotaOperations.{}', + client_factory=cf_quota) + with self.command_group('machinelearningservices quota', machinelearningservices_quota, client_factory=cf_quota, + is_experimental=True) as g: + g.custom_command('list', 'machinelearningservices_quota_list') + g.custom_command('update', 'machinelearningservices_quota_update') + + from azext_machinelearningservices.generated._client_factory import cf_workspace_connection + machinelearningservices_workspace_connection = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._workspace_conn' + 'ection_operations#WorkspaceConnectionOperations.{}', + client_factory=cf_workspace_connection) + with self.command_group('machinelearningservices workspace-connection', + machinelearningservices_workspace_connection, client_factory=cf_workspace_connection, + is_experimental=True) as g: + g.custom_command('list', 'machinelearningservices_workspace_connection_list') + g.custom_show_command('show', 'machinelearningservices_workspace_connection_show') + g.custom_command('create', 'machinelearningservices_workspace_connection_create') + g.custom_command('delete', 'machinelearningservices_workspace_connection_delete', confirmation=True) + + from azext_machinelearningservices.generated._client_factory import cf_machine_learning_compute + machinelearningservices_machine_learning_compute = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._machine_learni' + 'ng_compute_operations#MachineLearningComputeOperations.{}', + client_factory=cf_machine_learning_compute) + with self.command_group('machinelearningservices machine-learning-compute', + machinelearningservices_machine_learning_compute, + client_factory=cf_machine_learning_compute, is_experimental=True) as g: + 
g.custom_command('list', 'machinelearningservices_machine_learning_compute_list') + g.custom_show_command('show', 'machinelearningservices_machine_learning_compute_show') + g.custom_command('aks create', 'machinelearningservices_machine_learning_compute_aks_create', + supports_no_wait=True) + g.custom_command('aml-compute create', 'machinelearningservices_machine_learning_compute_aml_compute_create', + supports_no_wait=True) + g.custom_command('compute-instance create', 'machinelearningservices_machine_learning_compute_compute_instance_' + 'create', supports_no_wait=True) + g.custom_command('data-factory create', 'machinelearningservices_machine_learning_compute_data_factory_create', + supports_no_wait=True) + g.custom_command('data-lake-analytics create', 'machinelearningservices_machine_learning_compute_data_lake_anal' + 'ytics_create', supports_no_wait=True) + g.custom_command('databricks create', 'machinelearningservices_machine_learning_compute_databricks_create', + supports_no_wait=True) + g.custom_command('hd-insight create', 'machinelearningservices_machine_learning_compute_hd_insight_create', + supports_no_wait=True) + g.custom_command('virtual-machine create', 'machinelearningservices_machine_learning_compute_virtual_machine_cr' + 'eate', supports_no_wait=True) + g.custom_command('update', 'machinelearningservices_machine_learning_compute_update', supports_no_wait=True) + g.custom_command('delete', 'machinelearningservices_machine_learning_compute_delete', supports_no_wait=True, + confirmation=True) + g.custom_command('list-key', 'machinelearningservices_machine_learning_compute_list_key') + g.custom_command('list-node', 'machinelearningservices_machine_learning_compute_list_node') + g.custom_command('restart', 'machinelearningservices_machine_learning_compute_restart') + g.custom_command('start', 'machinelearningservices_machine_learning_compute_start') + g.custom_command('stop', 'machinelearningservices_machine_learning_compute_stop') + g.custom_wait_command('wait', 'machinelearningservices_machine_learning_compute_show') + + from azext_machinelearningservices.generated._client_factory import cf_machinelearningservices + machinelearningservices_ = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._model_operatio' + 'ns#AzureMachineLearningWorkspacesOperationsMixin.{}', + client_factory=cf_machinelearningservices) + with self.command_group('machinelearningservices ', machinelearningservices_, + client_factory=cf_machinelearningservices, is_experimental=True) as g: + g.custom_command('list-sku', 'machinelearningservices__list_sku') + + from azext_machinelearningservices.generated._client_factory import cf_private_endpoint_connection + machinelearningservices_private_endpoint_connection = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_endpoi' + 'nt_connection_operations#PrivateEndpointConnectionOperations.{}', + client_factory=cf_private_endpoint_connection) + with self.command_group('machinelearningservices private-endpoint-connection', + machinelearningservices_private_endpoint_connection, + client_factory=cf_private_endpoint_connection, is_experimental=True) as g: + g.custom_show_command('show', 'machinelearningservices_private_endpoint_connection_show') + g.custom_command('delete', 'machinelearningservices_private_endpoint_connection_delete', supports_no_wait=True, + confirmation=True) + g.custom_command('put', 
'machinelearningservices_private_endpoint_connection_put') + g.custom_wait_command('wait', 'machinelearningservices_private_endpoint_connection_show') + + from azext_machinelearningservices.generated._client_factory import cf_private_link_resource + machinelearningservices_private_link_resource = CliCommandType( + operations_tmpl='azext_machinelearningservices.vendored_sdks.machinelearningservices.operations._private_link_r' + 'esource_operations#PrivateLinkResourceOperations.{}', + client_factory=cf_private_link_resource) + with self.command_group('machinelearningservices private-link-resource', + machinelearningservices_private_link_resource, client_factory=cf_private_link_resource, + is_experimental=True) as g: + g.custom_command('list', 'machinelearningservices_private_link_resource_list') diff --git a/src/machinelearningservices/azext_machinelearningservices/generated/custom.py b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py new file mode 100644 index 00000000000..63145a13c31 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/generated/custom.py @@ -0,0 +1,617 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=line-too-long +# pylint: disable=too-many-lines + +from azure.cli.core.util import sdk_no_wait + + +def machinelearningservices_workspace_list(client, + resource_group_name=None, + skiptoken=None): + if resource_group_name: + return client.list_by_resource_group(resource_group_name=resource_group_name, + skiptoken=skiptoken) + return client.list_by_subscription(skiptoken=skiptoken) + + +def machinelearningservices_workspace_show(client, + resource_group_name, + workspace_name): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def machinelearningservices_workspace_create(client, + resource_group_name, + workspace_name, + location=None, + tags=None, + sku=None, + identity_type=None, + identity_user_assigned_identities=None, + description=None, + friendly_name=None, + key_vault=None, + application_insights=None, + container_registry=None, + storage_account=None, + discovery_url=None, + hbi_workspace=None, + image_build_compute=None, + allow_public_access_when_behind_vnet=None, + shared_private_link_resources=None, + encryption_status=None, + encryption_key_vault_properties=None, + no_wait=False): + if hbi_workspace is None: + hbi_workspace = False + if allow_public_access_when_behind_vnet is None: + allow_public_access_when_behind_vnet = False + return sdk_no_wait(no_wait, + client.begin_create_or_update, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + location=location, + tags=tags, + sku=sku, + type=identity_type, + user_assigned_identities=identity_user_assigned_identities, + description=description, + friendly_name=friendly_name, + key_vault=key_vault, + application_insights=application_insights, + container_registry=container_registry, + storage_account=storage_account, + discovery_url=discovery_url, + hbi_workspace=hbi_workspace, + image_build_compute=image_build_compute, + 
allow_public_access_when_behind_vnet=allow_public_access_when_behind_vnet, + shared_private_link_resources=shared_private_link_resources, + status=encryption_status, + key_vault_properties=encryption_key_vault_properties) + + +def machinelearningservices_workspace_update(client, + resource_group_name, + workspace_name, + tags=None, + sku=None, + description=None, + friendly_name=None): + return client.update(resource_group_name=resource_group_name, + workspace_name=workspace_name, + tags=tags, + sku=sku, + description=description, + friendly_name=friendly_name) + + +def machinelearningservices_workspace_delete(client, + resource_group_name, + workspace_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_delete, + resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def machinelearningservices_workspace_list_key(client, + resource_group_name, + workspace_name): + return client.list_key(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def machinelearningservices_workspace_resync_key(client, + resource_group_name, + workspace_name): + return client.resync_key(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def machinelearningservices_workspace_feature_list(client, + resource_group_name, + workspace_name): + return client.list(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def machinelearningservices_notebook_prepare(client, + resource_group_name, + workspace_name): + return client.begin_prepare(resource_group_name=resource_group_name, + workspace_name=workspace_name) + + +def machinelearningservices_usage_list(client, + location): + return client.list(location=location) + + +def machinelearningservices_virtual_machine_size_list(client, + location): + return client.list(location=location) + + +def machinelearningservices_quota_list(client, + location): + return client.list(location=location) + + +def machinelearningservices_quota_update(client, + location, + value=None): + return client.update(location=location, + value=value) + + +def machinelearningservices_workspace_connection_list(client, + resource_group_name, + workspace_name, + target=None, + category=None): + return client.list(resource_group_name=resource_group_name, + workspace_name=workspace_name, + target=target, + category=category) + + +def machinelearningservices_workspace_connection_show(client, + resource_group_name, + workspace_name, + connection_name): + return client.get(resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name) + + +def machinelearningservices_workspace_connection_create(client, + resource_group_name, + workspace_name, + connection_name, + name=None, + category=None, + target=None, + auth_type=None, + value=None): + return client.create(resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + name=name, + category=category, + target=target, + auth_type=auth_type, + value=value) + + +def machinelearningservices_workspace_connection_delete(client, + resource_group_name, + workspace_name, + connection_name): + return client.delete(resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name) + + +def machinelearningservices_machine_learning_compute_list(client, + resource_group_name, + workspace_name, + skiptoken=None): + return client.list_by_workspace(resource_group_name=resource_group_name, + workspace_name=workspace_name, + 
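The custom functions in this module follow one convention, visible above: synchronous operations call the client method directly and return its result, while long-running operations are funneled through sdk_no_wait so the command's --no-wait flag is honored. A minimal sketch of that convention, using a hypothetical begin_do_thing operation rather than anything in this SDK:

    from azure.cli.core.util import sdk_no_wait

    def example_do_thing(client, resource_group_name, name, no_wait=False):
        # no_wait=False: returns the LRO poller as usual; no_wait=True: sends the
        # initial request and returns immediately without polling for completion.
        return sdk_no_wait(no_wait, client.begin_do_thing,
                           resource_group_name=resource_group_name, name=name)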
                                    skiptoken=skiptoken)
+
+
+def machinelearningservices_machine_learning_compute_show(client,
+                                                           resource_group_name,
+                                                           workspace_name,
+                                                           compute_name):
+    return client.get(resource_group_name=resource_group_name,
+                      workspace_name=workspace_name,
+                      compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_aks_create(client,
+                                                                 resource_group_name,
+                                                                 workspace_name,
+                                                                 compute_name,
+                                                                 location=None,
+                                                                 tags=None,
+                                                                 sku=None,
+                                                                 identity_type=None,
+                                                                 identity_user_assigned_identities=None,
+                                                                 compute_location=None,
+                                                                 description=None,
+                                                                 resource_id=None,
+                                                                 properties_properties=None,
+                                                                 no_wait=False):
+    properties = {}
+    properties['compute_type'] = 'Aks'
+    properties['compute_location'] = compute_location
+    properties['description'] = description
+    properties['resource_id'] = resource_id
+    properties['properties'] = properties_properties
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name,
+                       location=location,
+                       tags=tags,
+                       sku=sku,
+                       type=identity_type,
+                       user_assigned_identities=identity_user_assigned_identities,
+                       properties=properties)
+
+
+def machinelearningservices_machine_learning_compute_aml_compute_create(client,
+                                                                         resource_group_name,
+                                                                         workspace_name,
+                                                                         compute_name,
+                                                                         location=None,
+                                                                         tags=None,
+                                                                         sku=None,
+                                                                         identity_type=None,
+                                                                         identity_user_assigned_identities=None,
+                                                                         compute_location=None,
+                                                                         description=None,
+                                                                         resource_id=None,
+                                                                         properties_properties=None,
+                                                                         no_wait=False):
+    properties = {}
+    properties['compute_type'] = 'AmlCompute'
+    properties['compute_location'] = compute_location
+    properties['description'] = description
+    properties['resource_id'] = resource_id
+    properties['properties'] = properties_properties
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name,
+                       location=location,
+                       tags=tags,
+                       sku=sku,
+                       type=identity_type,
+                       user_assigned_identities=identity_user_assigned_identities,
+                       properties=properties)
+
+
+def machinelearningservices_machine_learning_compute_compute_instance_create(client,
+                                                                              resource_group_name,
+                                                                              workspace_name,
+                                                                              compute_name,
+                                                                              location=None,
+                                                                              tags=None,
+                                                                              sku=None,
+                                                                              identity_type=None,
+                                                                              identity_user_assigned_identities=None,
+                                                                              compute_location=None,
+                                                                              description=None,
+                                                                              resource_id=None,
+                                                                              vm_size=None,
+                                                                              application_sharing_policy=None,
+                                                                              ssh_settings=None,
+                                                                              subnet_id=None,
+                                                                              no_wait=False):
+    if application_sharing_policy is None:
+        application_sharing_policy = "Shared"
+    properties = {}
+    properties['compute_type'] = 'ComputeInstance'
+    properties['compute_location'] = compute_location
+    properties['description'] = description
+    properties['resource_id'] = resource_id
+    properties['vm_size'] = vm_size
+    properties['application_sharing_policy'] = application_sharing_policy
+    properties['ssh_settings'] = ssh_settings
+    properties['id'] = subnet_id
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name,
+                       location=location,
+                       tags=tags,
+                       sku=sku,
+                       type=identity_type,
+                       user_assigned_identities=identity_user_assigned_identities,
+                       properties=properties)
+
+
+def machinelearningservices_machine_learning_compute_data_factory_create(client,
+                                                                          resource_group_name,
+                                                                          workspace_name,
+                                                                          compute_name,
+                                                                          location=None,
+                                                                          tags=None,
+                                                                          sku=None,
+                                                                          identity_type=None,
+                                                                          identity_user_assigned_identities=None,
+                                                                          compute_location=None,
+                                                                          description=None,
+                                                                          resource_id=None,
+                                                                          no_wait=False):
+    properties = {}
+    properties['compute_type'] = 'DataFactory'
+    properties['compute_location'] = compute_location
+    properties['description'] = description
+    properties['resource_id'] = resource_id
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name,
+                       location=location,
+                       tags=tags,
+                       sku=sku,
+                       type=identity_type,
+                       user_assigned_identities=identity_user_assigned_identities,
+                       properties=properties)
+
+
+def machinelearningservices_machine_learning_compute_data_lake_analytics_create(client,
+                                                                                 resource_group_name,
+                                                                                 workspace_name,
+                                                                                 compute_name,
+                                                                                 location=None,
+                                                                                 tags=None,
+                                                                                 sku=None,
+                                                                                 identity_type=None,
+                                                                                 identity_user_assigned_identities=None,
+                                                                                 compute_location=None,
+                                                                                 description=None,
+                                                                                 resource_id=None,
+                                                                                 data_lake_store_account_name=None,
+                                                                                 no_wait=False):
+    properties = {}
+    properties['compute_type'] = 'DataLakeAnalytics'
+    properties['compute_location'] = compute_location
+    properties['description'] = description
+    properties['resource_id'] = resource_id
+    properties['data_lake_store_account_name'] = data_lake_store_account_name
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name,
+                       location=location,
+                       tags=tags,
+                       sku=sku,
+                       type=identity_type,
+                       user_assigned_identities=identity_user_assigned_identities,
+                       properties=properties)
+
+
+def machinelearningservices_machine_learning_compute_databricks_create(client,
+                                                                        resource_group_name,
+                                                                        workspace_name,
+                                                                        compute_name,
+                                                                        location=None,
+                                                                        tags=None,
+                                                                        sku=None,
+                                                                        identity_type=None,
+                                                                        identity_user_assigned_identities=None,
+                                                                        compute_location=None,
+                                                                        description=None,
+                                                                        resource_id=None,
+                                                                        databricks_access_token=None,
+                                                                        no_wait=False):
+    properties = {}
+    properties['compute_type'] = 'Databricks'
+    properties['compute_location'] = compute_location
+    properties['description'] = description
+    properties['resource_id'] = resource_id
+    properties['databricks_access_token'] = databricks_access_token
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name,
+                       location=location,
+                       tags=tags,
+                       sku=sku,
+                       type=identity_type,
+                       user_assigned_identities=identity_user_assigned_identities,
+                       properties=properties)
+
+
+def machinelearningservices_machine_learning_compute_hd_insight_create(client,
+                                                                        resource_group_name,
+                                                                        workspace_name,
+                                                                        compute_name,
+                                                                        location=None,
+                                                                        tags=None,
+                                                                        sku=None,
+                                                                        identity_type=None,
+                                                                        identity_user_assigned_identities=None,
+                                                                        compute_location=None,
+                                                                        description=None,
+                                                                        resource_id=None,
+                                                                        ssh_port=None,
+                                                                        address=None,
+                                                                        administrator_account=None,
+                                                                        no_wait=False):
+    properties = {}
+    properties['compute_type'] = 'HdInsight'
+    properties['compute_location'] = compute_location
+    properties['description'] = description
+    properties['resource_id'] = resource_id
+    properties['ssh_port'] = ssh_port
+    properties['address'] = address
+    properties['administrator_account'] = administrator_account
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name,
+                       location=location,
+                       tags=tags,
+                       sku=sku,
+                       type=identity_type,
+                       user_assigned_identities=identity_user_assigned_identities,
+                       properties=properties)
+
+
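The machine-learning-compute *_create helpers in this module differ only in the 'compute_type' discriminator and the flavor-specific keys they add before handing the payload to begin_create_or_update. Roughly, the dictionary they assemble has this shape (values are illustrative only):

    properties = {
        'compute_type': 'AmlCompute',      # discriminator chosen by the subcommand
        'compute_location': 'eastus',
        'description': 'training cluster',
        'resource_id': None,
        'properties': {'vmSize': 'STANDARD_NC6',
                       'scaleSettings': {'maxNodeCount': 4, 'minNodeCount': 0}},
    }
    # forwarded as: sdk_no_wait(no_wait, client.begin_create_or_update, ..., properties=properties)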
+def machinelearningservices_machine_learning_compute_virtual_machine_create(client,
+                                                                             resource_group_name,
+                                                                             workspace_name,
+                                                                             compute_name,
+                                                                             location=None,
+                                                                             tags=None,
+                                                                             sku=None,
+                                                                             identity_type=None,
+                                                                             identity_user_assigned_identities=None,
+                                                                             compute_location=None,
+                                                                             description=None,
+                                                                             resource_id=None,
+                                                                             virtual_machine_size=None,
+                                                                             ssh_port=None,
+                                                                             address=None,
+                                                                             administrator_account=None,
+                                                                             no_wait=False):
+    properties = {}
+    properties['compute_type'] = 'VirtualMachine'
+    properties['compute_location'] = compute_location
+    properties['description'] = description
+    properties['resource_id'] = resource_id
+    properties['virtual_machine_size'] = virtual_machine_size
+    properties['ssh_port'] = ssh_port
+    properties['address'] = address
+    properties['administrator_account'] = administrator_account
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name,
+                       location=location,
+                       tags=tags,
+                       sku=sku,
+                       type=identity_type,
+                       user_assigned_identities=identity_user_assigned_identities,
+                       properties=properties)
+
+
+def machinelearningservices_machine_learning_compute_update(client,
+                                                             resource_group_name,
+                                                             workspace_name,
+                                                             compute_name,
+                                                             scale_settings=None,
+                                                             no_wait=False):
+    return sdk_no_wait(no_wait,
+                       client.begin_update,
+                       resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name,
+                       scale_settings=scale_settings)
+
+
+def machinelearningservices_machine_learning_compute_delete(client,
+                                                             resource_group_name,
+                                                             workspace_name,
+                                                             compute_name,
+                                                             underlying_resource_action,
+                                                             no_wait=False):
+    return sdk_no_wait(no_wait,
+                       client.begin_delete,
+                       resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name,
+                       underlying_resource_action=underlying_resource_action)
+
+
+def machinelearningservices_machine_learning_compute_list_key(client,
+                                                               resource_group_name,
+                                                               workspace_name,
+                                                               compute_name):
+    return client.list_key(resource_group_name=resource_group_name,
+                           workspace_name=workspace_name,
+                           compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_list_node(client,
+                                                                resource_group_name,
+                                                                workspace_name,
+                                                                compute_name):
+    return client.list_node(resource_group_name=resource_group_name,
+                            workspace_name=workspace_name,
+                            compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_restart(client,
+                                                              resource_group_name,
+                                                              workspace_name,
+                                                              compute_name):
+    return client.restart(resource_group_name=resource_group_name,
+                          workspace_name=workspace_name,
+                          compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_start(client,
+                                                            resource_group_name,
+                                                            workspace_name,
+                                                            compute_name):
+    return client.start(resource_group_name=resource_group_name,
+                        workspace_name=workspace_name,
+                        compute_name=compute_name)
+
+
+def machinelearningservices_machine_learning_compute_stop(client,
+                                                           resource_group_name,
+                                                           workspace_name,
+                                                           compute_name):
+    return client.stop(resource_group_name=resource_group_name,
+                       workspace_name=workspace_name,
+                       compute_name=compute_name)
+
+
+def machinelearningservices__list_sku(client):
+    return client.list_sku()
+
+
+def machinelearningservices_private_endpoint_connection_show(client,
+                                                              resource_group_name,
+                                                              workspace_name,
+                                                              private_endpoint_connection_name):
+    return client.get(resource_group_name=resource_group_name,
+                      workspace_name=workspace_name, +
private_endpoint_connection_name=private_endpoint_connection_name) + + +def machinelearningservices_private_endpoint_connection_delete(client, + resource_group_name, + workspace_name, + private_endpoint_connection_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_delete, + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name) + + +def machinelearningservices_private_endpoint_connection_put(client, + resource_group_name, + workspace_name, + private_endpoint_connection_name, + private_link_service_connection_state=None): + return client.put(resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + private_endpoint=None, + private_link_service_connection_state=private_link_service_connection_state) + + +def machinelearningservices_private_link_resource_list(client, + resource_group_name, + workspace_name): + return client.list_by_workspace(resource_group_name=resource_group_name, + workspace_name=workspace_name) diff --git a/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/manual/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py new file mode 100644 index 00000000000..50e0627daff --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/tests/__init__.py @@ -0,0 +1,114 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. 
+# --------------------------------------------------------------------------
+import inspect
+import logging
+import os
+import sys
+import traceback
+import datetime as dt
+
+from azure.core.exceptions import AzureError
+from azure.cli.testsdk.exceptions import CliTestError, CliExecutionError, JMESPathCheckAssertionError
+
+
+logger = logging.getLogger('azure.cli.testsdk')
+logger.addHandler(logging.StreamHandler())
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
+exceptions = []
+test_map = dict()
+SUCCESSED = "successed"
+FAILED = "failed"
+
+
+# Run the decorated test step, preferring a hand-written override from ..manual
+# when one exists, and record each step's outcome in test_map.
+def try_manual(func):
+    def import_manual_function(origin_func):
+        from importlib import import_module
+        decorated_path = inspect.getfile(origin_func)
+        module_path = __path__[0]
+        if not decorated_path.startswith(module_path):
+            raise Exception("Decorator can only be used in submodules!")
+        manual_path = os.path.join(
+            decorated_path[module_path.rfind(os.path.sep) + 1:])
+        manual_file_path, manual_file_name = os.path.split(manual_path)
+        module_name, _ = os.path.splitext(manual_file_name)
+        manual_module = "..manual." + \
+            ".".join(manual_file_path.split(os.path.sep) + [module_name, ])
+        return getattr(import_module(manual_module, package=__name__), origin_func.__name__)
+
+    def get_func_to_call():
+        func_to_call = func
+        try:
+            func_to_call = import_manual_function(func)
+            logger.info("Found manual override for %s(...)", func.__name__)
+        except (ImportError, AttributeError):
+            pass
+        return func_to_call
+
+    def wrapper(*args, **kwargs):
+        func_to_call = get_func_to_call()
+        logger.info("running %s()...", func.__name__)
+        try:
+            test_map[func.__name__] = dict()
+            test_map[func.__name__]["result"] = SUCCESSED
+            test_map[func.__name__]["error_message"] = ""
+            test_map[func.__name__]["error_stack"] = ""
+            test_map[func.__name__]["error_normalized"] = ""
+            test_map[func.__name__]["start_dt"] = dt.datetime.utcnow()
+            ret = func_to_call(*args, **kwargs)
+        except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit,
+                JMESPathCheckAssertionError) as e:
+            test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+            test_map[func.__name__]["result"] = FAILED
+            test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500]
+            test_map[func.__name__]["error_stack"] = traceback.format_exc().replace(
+                "\r\n", " ").replace("\n", " ")[:500]
+            logger.info("--------------------------------------")
+            logger.info("step exception: %s", e)
+            logger.error("--------------------------------------")
+            logger.error("step exception in %s: %s", func.__name__, e)
+            logger.info(traceback.format_exc())
+            exceptions.append((func.__name__, sys.exc_info()))
+        else:
+            test_map[func.__name__]["end_dt"] = dt.datetime.utcnow()
+            return ret
+
+    if inspect.isclass(func):
+        return get_func_to_call()
+    return wrapper
+
+
+# Write a <filename>_coverage.md summary of the recorded step results.
+def calc_coverage(filename):
+    filename = filename.split(".")[0]
+    coverage_name = filename + "_coverage.md"
+    with open(coverage_name, "w") as f:
+        f.write("|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt|\n")
+        total = len(test_map)
+        covered = 0
+        for k, v in test_map.items():
+            if not k.startswith("step_"):
+                total -= 1
+                continue
+            if v["result"] == SUCCESSED:
+                covered += 1
+            f.write("|{step_name}|{result}|{error_message}|{error_stack}|{error_normalized}|{start_dt}|"
+                    "{end_dt}|\n".format(step_name=k, **v))
+        f.write("Coverage: {}/{}\n".format(covered, total))
+    print("Create coverage\n", file=sys.stderr)
+
+
+def 
raise_if(): + if exceptions: + if len(exceptions) <= 1: + raise exceptions[0][1][1] + message = "{}\nFollowed with exceptions in other steps:\n".format(str(exceptions[0][1][1])) + message += "\n".join(["{}: {}".format(h[0], h[1][1]) for h in exceptions[1:]]) + raise exceptions[0][1][0](message).with_traceback(exceptions[0][1][2]) diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py new file mode 100644 index 00000000000..7571a8a5af7 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/tests/latest/test_machinelearningservices_scenario.py @@ -0,0 +1,1356 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import os +from azure.cli.testsdk import ScenarioTest +from .. 
import try_manual, raise_if, calc_coverage +from azure.cli.testsdk import ResourceGroupPreparer +from azure.cli.testsdk import StorageAccountPreparer + + +TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) + + +@try_manual +def setup(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + pass + + +# EXAMPLE: /Workspaces/put/Create Workspace +@try_manual +def step__workspaces_put_create_workspace(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace create ' + '--location "eastus2euap" ' + '--description "test description" ' + '--application-insights "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/microsoft.insights' + '/components/testinsights" ' + '--container-registry "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.ContainerR' + 'egistry/registries/testRegistry" ' + '--encryption-key-vault-properties identity-client-id="" key-identifier="https://testkv.vault.azure.net/ke' + 'ys/testkey/aabbccddee112233445566778899aabb" key-vault-arm-id="/subscriptions/{subscription_id}/resourceG' + 'roups/{rg}/providers/Microsoft.KeyVault/vaults/testkv" ' + '--encryption-status "Enabled" ' + '--friendly-name "HelloName" ' + '--hbi-workspace false ' + '--key-vault "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVault/vaults/tes' + 'tkv" ' + '--shared-private-link-resources name="testdbresource" private-link-resource-id="/subscriptions/{subscript' + 'ion_id}/resourceGroups/{rg}/providers/Microsoft.DocumentDB/databaseAccounts/testdbresource/privateLinkRes' + 'ources/{myPrivateLinkResource}" group-id="{myPrivateLinkResource}" request-message="Please approve" ' + 'status="Approved" ' + '--storage-account "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microsoft.Storage/sto' + 'rageAccounts/{sa}" ' + '--sku name="Basic" tier="Basic" ' + '--resource-group "{rg}" ' + '--name "{myWorkspace}"', + checks=[ + test.check("location", "eastus2euap", case_sensitive=False), + test.check("description", "test description", case_sensitive=False), + test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/micr" + "osoft.insights/components/testinsights", case_sensitive=False), + test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Micros" + "oft.ContainerRegistry/registries/testRegistry", case_sensitive=False), + test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False), + test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey" + "/aabbccddee112233445566778899aabb", case_sensitive=False), + test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGr" + "oups/{rg}/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False), + test.check("encryption.status", "Enabled", case_sensitive=False), + test.check("friendlyName", "HelloName", case_sensitive=False), + test.check("hbiWorkspace", False), + test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVa" + "ult/vaults/testkv", case_sensitive=False), + test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microso" + "ft.Storage/storageAccounts/{sa}", case_sensitive=False), + test.check("sku.name", "Basic", case_sensitive=False), + test.check("sku.tier", "Basic", case_sensitive=False), + test.check("name", "{myWorkspace}", case_sensitive=False), 
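Each scenario step in this file is a function decorated with @try_manual, so a hand-written step of the same name under tests/manual/ silently takes precedence when one exists; otherwise the generated body runs an az command through test.cmd and validates the JSON response with JMESPath checks. A minimal hand-written step might look like this (the step name and checks are illustrative, not generated):

    @try_manual
    def step__workspace_show(test, rg, **kwargs):
        test.cmd('az machinelearningservices workspace show '
                 '--resource-group "{rg}" '
                 '--name "{myWorkspace}"',
                 checks=[test.check('name', '{myWorkspace}', case_sensitive=False)])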
+        ])
+    test.cmd('az machinelearningservices workspace wait --created '
+             '--resource-group "{rg}" '
+             '--name "{myWorkspace}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create AKS Compute
+@try_manual
+def step__machinelearningcompute_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--location "eastus" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a AML Compute
+@try_manual
+def step__machinelearningcompute_put2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--identity-type "SystemAssigned,UserAssigned" '
+             '--identity-user-assigned-identities "{{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGro'
+             'ups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{{}}}}" '
+             '--location "eastus" '
+             '--properties-properties "{{\\"remoteLoginPortPublicAccess\\":\\"NotSpecified\\",\\"scaleSettings\\":{{\\"'
+             'maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTimeBeforeScaleDown\\":\\"PT5M\\"}},\\"vmPriority\\":\\'
+             '"Dedicated\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a ComputeInstance Compute
+@try_manual
+def step__machinelearningcompute_put3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--location "eastus" '
+             '--properties-properties "{{\\"applicationSharingPolicy\\":\\"Personal\\",\\"sshSettings\\":{{\\"sshPublic'
+             'Access\\":\\"Disabled\\"}},\\"subnet\\":\\"test-subnet-resource-id\\",\\"vmSize\\":\\"STANDARD_NC6\\"}}" '
+             ''
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a ComputeInstance Compute with minimal inputs
+@try_manual
+def step__machinelearningcompute_put4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--location "eastus" '
+             '--properties-properties "{{\\"vmSize\\":\\"STANDARD_NC6\\"}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/put/Create a DataFactory Compute
+@try_manual
+def step__machinelearningcompute_put5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--location "eastus" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/put/Update a AKS Compute
+@try_manual
+def step__machinelearningcompute_put6(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--location "eastus" '
+             '--description "some compute" '
+             '--properties-properties "{{\\"agentCount\\":4}}" '
+             '--resource-id "/subscriptions/{subscription_id}/resourcegroups/{rg_3}/providers/Microsoft.ContainerServic'
+             'e/managedClusters/compute123-56826-c9b00420020b2" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/put/Update a AML Compute
+@try_manual
+def step__machinelearningcompute_put7(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute aks create '
+             '--compute-name "compute123" '
+             '--identity-type "SystemAssigned,UserAssigned" '
+             '--identity-user-assigned-identities "{{\\"/subscriptions/00000000-0000-0000-0000-000000000000/resourceGro'
+             'ups/myResourceGroup/providers/Microsoft.ManagedIdentity/userAssignedIdentities/identity-name\\":{{}}}}" '
+             '--location "eastus" '
+             '--properties-properties "{{\\"scaleSettings\\":{{\\"maxNodeCount\\":1,\\"minNodeCount\\":0,\\"nodeIdleTim'
+             'eBeforeScaleDown\\":\\"PT5M\\"}}}}" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get Computes
+@try_manual
+def step__machinelearningcompute_get_get_computes(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute list '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get a AKS Compute
+@try_manual
+def step__machinelearningcompute_get_get_a_aks_compute(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute show '
+             '--compute-name "compute123" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get a AML Compute
+@try_manual
+def step__machinelearningcompute_get_get_a_aml_compute(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute show '
+             '--compute-name "compute123" '
+             '--resource-group "{rg_3}" '
+             '--workspace-name "{myWorkspace2}"',
+             checks=[])
+
+
+# EXAMPLE: /MachineLearningCompute/get/Get a ComputeInstance
+@try_manual
+def step__machinelearningcompute_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7):
+    test.cmd('az machinelearningservices machine-learning-compute show '
+             '--compute-name "compute123" '
+             '--resource-group "{rg_3}" 
' + '--workspace-name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /MachineLearningCompute/post/Get compute nodes information for a compute +@try_manual +def step__machinelearningcompute_post(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices machine-learning-compute list-node ' + '--compute-name "compute123" ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /MachineLearningCompute/post/List AKS Compute Keys +@try_manual +def step__machinelearningcompute_post2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices machine-learning-compute list-key ' + '--compute-name "compute123" ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /MachineLearningCompute/post/Restart ComputeInstance Compute +@try_manual +def step__machinelearningcompute_post3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices machine-learning-compute restart ' + '--compute-name "compute123" ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /MachineLearningCompute/post/Start ComputeInstance Compute +@try_manual +def step__machinelearningcompute_post4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices machine-learning-compute start ' + '--compute-name "compute123" ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /MachineLearningCompute/post/Stop ComputeInstance Compute +@try_manual +def step__machinelearningcompute_post5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices machine-learning-compute stop ' + '--compute-name "compute123" ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /MachineLearningCompute/patch/Update a AmlCompute Compute +@try_manual +def step__machinelearningcompute_patch(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices machine-learning-compute update ' + '--compute-name "compute123" ' + '--scale-settings max-node-count=4 min-node-count=4 node-idle-time-before-scale-down="PT5M" ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /Notebooks/post/Prepare Notebook +@try_manual +def step__notebooks_post_prepare_notebook(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices notebook prepare ' + '--resource-group "{rg_3}" ' + '--workspace-name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /PrivateEndpointConnections/put/WorkspacePutPrivateEndpointConnection +@try_manual +def step__privateendpointconnections_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices private-endpoint-connection put ' + '--name "{myPrivateEndpointConnection}" ' + '--private-link-service-connection-state description="Auto-Approved" status="Approved" ' + '--resource-group "{rg_7}" ' + '--workspace-name "{myWorkspace}"', + checks=[]) + + +# EXAMPLE: /PrivateEndpointConnections/get/WorkspaceGetPrivateEndpointConnection +@try_manual +def step__privateendpointconnections_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices private-endpoint-connection show ' + '--name "{myPrivateEndpointConnection}" ' + '--resource-group "{rg_7}" ' + '--workspace-name "{myWorkspace}"', + checks=[ + test.check("name", "{myPrivateEndpointConnection}", 
case_sensitive=False), + ]) + + +# EXAMPLE: /PrivateLinkResources/get/WorkspaceListPrivateLinkResources +@try_manual +def step__privatelinkresources_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices private-link-resource list ' + '--resource-group "{rg_7}" ' + '--workspace-name "{myWorkspace}"', + checks=[]) + + +# EXAMPLE: /Quotas/get/List workspace quotas by VMFamily +@try_manual +def step__quotas_get_list_workspace_quotas_by_vmfamily(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices quota list ' + '--location "eastus"', + checks=[]) + + +# EXAMPLE: /Quotas/post/update quotas +@try_manual +def step__quotas_post_update_quotas(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices quota update ' + '--location "eastus" ' + '--value type="Microsoft.MachineLearningServices/workspaces/dedicatedCores/quotas" ' + 'id="/subscriptions/{subscription_id}/resourceGroups/{rg_5}/providers/Microsoft.MachineLearningServices/wo' + 'rkspaces/{myWorkspace3}/quotas/{myQuota}" limit=100 unit="Count" ' + '--value type="Microsoft.MachineLearningServices/workspaces/dedicatedCores/quotas" ' + 'id="/subscriptions/{subscription_id}/resourceGroups/{rg_5}/providers/Microsoft.MachineLearningServices/wo' + 'rkspaces/{myWorkspace4}/quotas/{myQuota}" limit=200 unit="Count"', + checks=[]) + + +# EXAMPLE: /Usages/get/List Usages +@try_manual +def step__usages_get_list_usages(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices usage list ' + '--location "eastus"', + checks=[]) + + +# EXAMPLE: /VirtualMachineSizes/get/List VM Sizes +@try_manual +def step__virtualmachinesizes_get_list_vm_sizes(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices virtual-machine-size list ' + '--location "eastus"', + checks=[]) + + +# EXAMPLE: /Workspaces/get/Get Workspace +@try_manual +def step__workspaces_get_get_workspace(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace show ' + '--resource-group "{rg}" ' + '--name "{myWorkspace}"', + checks=[ + test.check("location", "eastus2euap", case_sensitive=False), + test.check("description", "test description", case_sensitive=False), + test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/micr" + "osoft.insights/components/testinsights", case_sensitive=False), + test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Micros" + "oft.ContainerRegistry/registries/testRegistry", case_sensitive=False), + test.check("encryption.keyVaultProperties.identityClientId", "", case_sensitive=False), + test.check("encryption.keyVaultProperties.keyIdentifier", "https://testkv.vault.azure.net/keys/testkey" + "/aabbccddee112233445566778899aabb", case_sensitive=False), + test.check("encryption.keyVaultProperties.keyVaultArmId", "/subscriptions/{subscription_id}/resourceGr" + "oups/{rg}/providers/Microsoft.KeyVault/vaults/testkv", case_sensitive=False), + test.check("encryption.status", "Enabled", case_sensitive=False), + test.check("friendlyName", "HelloName", case_sensitive=False), + test.check("hbiWorkspace", False), + test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVa" + "ult/vaults/testkv", case_sensitive=False), + test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microso" + "ft.Storage/storageAccounts/{sa}", 
case_sensitive=False), + test.check("name", "{myWorkspace}", case_sensitive=False), + ]) + + +# EXAMPLE: /Workspaces/get/Get Workspaces by Resource Group +@try_manual +def step__workspaces_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace list ' + '--resource-group "{rg}"', + checks=[ + test.check('length(@)', 1), + ]) + + +# EXAMPLE: /Workspaces/get/Get Workspaces by subscription +@try_manual +def step__workspaces_get2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace list ' + '-g ""', + checks=[ + test.check('length(@)', 1), + ]) + + +# EXAMPLE: /Workspaces/post/List Workspace Keys +@try_manual +def step__workspaces_post_list_workspace_keys(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace list-key ' + '--resource-group "{rg_3}" ' + '--name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /Workspaces/post/Resync Workspace Keys +@try_manual +def step__workspaces_post_resync_workspace_keys(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace resync-key ' + '--resource-group "{rg_3}" ' + '--name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /Workspaces/patch/Update Workspace +@try_manual +def step__workspaces_patch_update_workspace(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace update ' + '--description "new description" ' + '--friendly-name "New friendly name" ' + '--sku name="Enterprise" tier="Enterprise" ' + '--resource-group "{rg}" ' + '--name "{myWorkspace}"', + checks=[ + test.check("location", "eastus2euap", case_sensitive=False), + test.check("description", "new description", case_sensitive=False), + test.check("applicationInsights", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/micr" + "osoft.insights/components/testinsights", case_sensitive=False), + test.check("containerRegistry", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Micros" + "oft.ContainerRegistry/registries/testRegistry", case_sensitive=False), + test.check("friendlyName", "New friendly name", case_sensitive=False), + test.check("keyVault", "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.KeyVa" + "ult/vaults/testkv", case_sensitive=False), + test.check("storageAccount", "/subscriptions/{subscription_id}/resourceGroups/{rg_2}/providers/Microso" + "ft.Storage/storageAccounts/{sa}", case_sensitive=False), + test.check("sku.name", "Enterprise", case_sensitive=False), + test.check("sku.tier", "Enterprise", case_sensitive=False), + test.check("name", "{myWorkspace}", case_sensitive=False), + ]) + + +# EXAMPLE: /machinelearningservices /get/List Skus +@try_manual +def step__machinelearningservices__get_list_skus(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices list-sku', + checks=[]) + + +# EXAMPLE: /WorkspaceConnections/put/CreateWorkspaceConnection +@try_manual +def step__workspaceconnections_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace-connection create ' + '--connection-name "{myMachinelearningservices}" ' + '--name "{myMachinelearningservices}" ' + '--auth-type "PAT" ' + '--category "ACR" ' + '--target "www.facebook.com" ' + '--value "secrets" ' + '--resource-group "{rg_6}" ' + '--workspace-name "{myWorkspace5}"', + checks=[]) + + +# EXAMPLE: /WorkspaceConnections/get/GetWorkspaceConnection +@try_manual +def 
step__workspaceconnections_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace-connection show ' + '--connection-name "{myMachinelearningservices}" ' + '--resource-group "{rg_6}" ' + '--workspace-name "{myWorkspace5}"', + checks=[]) + + +# EXAMPLE: /WorkspaceConnections/get/ListWorkspaceConnections +@try_manual +def step__workspaceconnections_get2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace-connection list ' + '--category "ACR" ' + '--resource-group "{rg_6}" ' + '--target "www.facebook.com" ' + '--workspace-name "{myWorkspace5}"', + checks=[]) + + +# EXAMPLE: /WorkspaceFeatures/get/List Workspace features +@try_manual +def step__workspacefeatures_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace-feature list ' + '--resource-group "{rg_4}" ' + '--workspace-name "{myWorkspace}"', + checks=[]) + + +# EXAMPLE: /MachineLearningCompute/delete/Delete Compute +@try_manual +def step__machinelearningcompute_delete_delete_compute(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices machine-learning-compute delete -y ' + '--compute-name "compute123" ' + '--resource-group "{rg_3}" ' + '--underlying-resource-action "Delete" ' + '--workspace-name "{myWorkspace2}"', + checks=[]) + + +# EXAMPLE: /PrivateEndpointConnections/delete/WorkspaceDeletePrivateEndpointConnection +@try_manual +def step__privateendpointconnections_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices private-endpoint-connection delete -y ' + '--name "{myPrivateEndpointConnection}" ' + '--resource-group "{rg_7}" ' + '--workspace-name "{myWorkspace}"', + checks=[]) + + +# EXAMPLE: /WorkspaceConnections/delete/DeleteWorkspaceConnection +@try_manual +def step__workspaceconnections_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace-connection delete -y ' + '--connection-name "{myMachinelearningservices}" ' + '--resource-group "{rg_6}" ' + '--workspace-name "{myWorkspace5}"', + checks=[]) + + +# EXAMPLE: /Workspaces/delete/Delete Workspace +@try_manual +def step__workspaces_delete_delete_workspace(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + test.cmd('az machinelearningservices workspace delete -y ' + '--resource-group "{rg}" ' + '--name "{myWorkspace}"', + checks=[]) + + +@try_manual +def cleanup(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + pass + + +@try_manual +def call_scenario(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + setup(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaces_put_create_workspace(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + 
step__machinelearningcompute_put2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put6(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put6(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put6(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put6(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put6(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put6(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put6(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put6(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put7(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put7(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put7(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put7(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put7(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + 
step__machinelearningcompute_put7(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put7(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_put7(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_get_get_computes(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_get_get_a_aks_compute(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_get_get_a_aml_compute(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_post(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_post2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_post3(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_post4(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_post5(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_patch(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__notebooks_post_prepare_notebook(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__privateendpointconnections_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__privateendpointconnections_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__privatelinkresources_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__quotas_get_list_workspace_quotas_by_vmfamily(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__quotas_post_update_quotas(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__usages_get_list_usages(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__virtualmachinesizes_get_list_vm_sizes(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaces_get_get_workspace(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaces_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaces_get2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaces_post_list_workspace_keys(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaces_post_resync_workspace_keys(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaces_patch_update_workspace(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningservices__get_list_skus(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaceconnections_put(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaceconnections_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaceconnections_get2(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspacefeatures_get(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__machinelearningcompute_delete_delete_compute(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__privateendpointconnections_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaceconnections_delete(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + step__workspaces_delete_delete_workspace(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + cleanup(test, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + + +@try_manual +class AzureMachineLearningWorkspacesScenarioTest(ScenarioTest): + + @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_workspace-1234'[:7], key='rg', parameter_name='' + 'rg') + @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_accountcrud-1234'[:7], key='rg_2', + parameter_name='rg_2') + @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg'[:7], key='rg_5', 
parameter_name='rg_5') + @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_testrg123'[:7], key='rg_3', parameter_name='' + 'rg_3') + @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_myResourceGroup'[:7], key='rg_4', + parameter_name='rg_4') + @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_resourceGroup-1'[:7], key='rg_6', + parameter_name='rg_6') + @ResourceGroupPreparer(name_prefix='clitestmachinelearningservices_rg-1234'[:7], key='rg_7', + parameter_name='rg_7') + @StorageAccountPreparer(name_prefix='clitestmachinelearningservices_testStorageAccount'[:7], key='sa', + resource_group_parameter_name='rg_2') + def test_machinelearningservices(self, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7): + + self.kwargs.update({ + 'subscription_id': self.get_subscription_id() + }) + + self.kwargs.update({ + 'myMachinelearningservices': 'connection-1', + 'myWorkspace6': 'default', + 'myPrivateLinkResource2': 'default', + 'myWorkspace3': 'demo_workspace1', + 'myWorkspace4': 'demo_workspace2', + 'myWorkspace': 'testworkspace', + 'myWorkspace2': 'workspaces123', + 'myWorkspace5': 'workspace-1', + 'myQuota': 'StandardDSv2Family', + 'myPrivateEndpointConnection': '{privateEndpointConnectionName}', + 'myPrivateLinkResource': 'Sql', + }) + + call_scenario(self, rg, rg_2, rg_5, rg_3, rg_4, rg_6, rg_7) + calc_coverage(__file__) + raise_if() diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py new file mode 100644 index 00000000000..c9cfdc73e77 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/__init__.py @@ -0,0 +1,12 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py new file mode 100644 index 00000000000..dad2c6eeb01 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/__init__.py @@ -0,0 +1,16 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
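For illustration only, and not part of the generated patch: the scenario class above relies on ScenarioTest keyword substitution, in which placeholders such as {rg_3} and {myWorkspace2} inside each test.cmd string are resolved from self.kwargs before the command runs. A minimal, self-contained sketch of that pattern, with hypothetical resource names:

from azure.cli.testsdk import ResourceGroupPreparer, ScenarioTest


class KwargsSubstitutionSketch(ScenarioTest):
    # Hypothetical example of the substitution pattern used by the generated
    # steps; the prefix, key and parameter_name mirror the decorators above.
    @ResourceGroupPreparer(name_prefix='clitest', key='rg', parameter_name='rg')
    def test_kwargs_substitution(self, rg):
        # Values registered here can be referenced as '{ws}' in command strings.
        self.kwargs.update({'ws': 'exampleworkspace'})
        # '{rg}' is registered by the preparer, '{ws}' by the update above.
        self.cmd('az machinelearningservices workspace list '
                 '--resource-group "{rg}"',
                 checks=[self.check('length(@)', 0)])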
+# -------------------------------------------------------------------------- + +from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces +__all__ = ['AzureMachineLearningWorkspaces'] + +try: + from ._patch import patch_sdk # type: ignore + patch_sdk() +except ImportError: + pass diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py new file mode 100644 index 00000000000..a8ecde47dde --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_azure_machine_learning_workspaces.py @@ -0,0 +1,120 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.mgmt.core import ARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + +from ._configuration import AzureMachineLearningWorkspacesConfiguration +from .operations import OperationOperations +from .operations import WorkspaceOperations +from .operations import WorkspaceFeatureOperations +from .operations import NotebookOperations +from .operations import UsageOperations +from .operations import VirtualMachineSizeOperations +from .operations import QuotaOperations +from .operations import WorkspaceConnectionOperations +from .operations import MachineLearningComputeOperations +from .operations import AzureMachineLearningWorkspacesOperationsMixin +from .operations import PrivateEndpointConnectionOperations +from .operations import PrivateLinkResourceOperations +from . import models + + +class AzureMachineLearningWorkspaces(AzureMachineLearningWorkspacesOperationsMixin): + """These APIs allow end users to operate on Azure Machine Learning Workspace resources. 
+ + :ivar operation: OperationOperations operations + :vartype operation: azure_machine_learning_workspaces.operations.OperationOperations + :ivar workspace: WorkspaceOperations operations + :vartype workspace: azure_machine_learning_workspaces.operations.WorkspaceOperations + :ivar workspace_feature: WorkspaceFeatureOperations operations + :vartype workspace_feature: azure_machine_learning_workspaces.operations.WorkspaceFeatureOperations + :ivar notebook: NotebookOperations operations + :vartype notebook: azure_machine_learning_workspaces.operations.NotebookOperations + :ivar usage: UsageOperations operations + :vartype usage: azure_machine_learning_workspaces.operations.UsageOperations + :ivar virtual_machine_size: VirtualMachineSizeOperations operations + :vartype virtual_machine_size: azure_machine_learning_workspaces.operations.VirtualMachineSizeOperations + :ivar quota: QuotaOperations operations + :vartype quota: azure_machine_learning_workspaces.operations.QuotaOperations + :ivar workspace_connection: WorkspaceConnectionOperations operations + :vartype workspace_connection: azure_machine_learning_workspaces.operations.WorkspaceConnectionOperations + :ivar machine_learning_compute: MachineLearningComputeOperations operations + :vartype machine_learning_compute: azure_machine_learning_workspaces.operations.MachineLearningComputeOperations + :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations + :vartype private_endpoint_connection: azure_machine_learning_workspaces.operations.PrivateEndpointConnectionOperations + :ivar private_link_resource: PrivateLinkResourceOperations operations + :vartype private_link_resource: azure_machine_learning_workspaces.operations.PrivateLinkResourceOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: Azure subscription identifier. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + base_url=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> None + if not base_url: + base_url = 'https://management.azure.com' + self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + + self.operation = OperationOperations( + self._client, self._config, self._serialize, self._deserialize) + self.workspace = WorkspaceOperations( + self._client, self._config, self._serialize, self._deserialize) + self.workspace_feature = WorkspaceFeatureOperations( + self._client, self._config, self._serialize, self._deserialize) + self.notebook = NotebookOperations( + self._client, self._config, self._serialize, self._deserialize) + self.usage = UsageOperations( + self._client, self._config, self._serialize, self._deserialize) + self.virtual_machine_size = VirtualMachineSizeOperations( + self._client, self._config, self._serialize, self._deserialize) + self.quota = QuotaOperations( + self._client, self._config, self._serialize, self._deserialize) + self.workspace_connection = WorkspaceConnectionOperations( + self._client, self._config, self._serialize, self._deserialize) + self.machine_learning_compute = MachineLearningComputeOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connection = PrivateEndpointConnectionOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_link_resource = PrivateLinkResourceOperations( + self._client, self._config, self._serialize, self._deserialize) + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> AzureMachineLearningWorkspaces + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py new file mode 100644 index 00000000000..c6316b346bc --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/_configuration.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any + + from azure.core.credentials import TokenCredential + +VERSION = "unknown" + +class AzureMachineLearningWorkspacesConfiguration(Configuration): + """Configuration for AzureMachineLearningWorkspaces. + + Note that all parameters used to create this instance are saved as instance + attributes. 
+ + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: Azure subscription identifier. + :type subscription_id: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = "2020-06-01" + self.credential_scopes = ['https://management.azure.com/.default'] + self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) -> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py new file mode 100644 index 00000000000..0250bddb2c5 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
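For illustration only, and not part of the generated patch: a minimal sketch of constructing the synchronous client defined above. The credential, the subscription id and the assumption that the vendored package is importable are placeholders; the retry_policy keyword is picked up by the _configure method shown above.

from azure.core.pipeline.policies import RetryPolicy
from azure.identity import DefaultAzureCredential

from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,
)

# The client forwards **kwargs to AzureMachineLearningWorkspacesConfiguration,
# so an explicit retry policy overrides the default built in _configure().
client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    retry_policy=RetryPolicy(retry_total=3),
)

# list_sku comes from the operations mixin and returns a lazily paged iterator.
for sku in client.list_sku():
    print(sku)

client.close()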
+# -------------------------------------------------------------------------- + +from ._azure_machine_learning_workspaces_async import AzureMachineLearningWorkspaces +__all__ = ['AzureMachineLearningWorkspaces'] diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces_async.py new file mode 100644 index 00000000000..0f7c931a022 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_azure_machine_learning_workspaces_async.py @@ -0,0 +1,114 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.mgmt.core import AsyncARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +from ._configuration_async import AzureMachineLearningWorkspacesConfiguration +from .operations_async import OperationOperations +from .operations_async import WorkspaceOperations +from .operations_async import WorkspaceFeatureOperations +from .operations_async import NotebookOperations +from .operations_async import UsageOperations +from .operations_async import VirtualMachineSizeOperations +from .operations_async import QuotaOperations +from .operations_async import WorkspaceConnectionOperations +from .operations_async import MachineLearningComputeOperations +from .operations_async import AzureMachineLearningWorkspacesOperationsMixin +from .operations_async import PrivateEndpointConnectionOperations +from .operations_async import PrivateLinkResourceOperations +from .. import models + + +class AzureMachineLearningWorkspaces(AzureMachineLearningWorkspacesOperationsMixin): + """These APIs allow end users to operate on Azure Machine Learning Workspace resources. 
+ + :ivar operation: OperationOperations operations + :vartype operation: azure_machine_learning_workspaces.aio.operations_async.OperationOperations + :ivar workspace: WorkspaceOperations operations + :vartype workspace: azure_machine_learning_workspaces.aio.operations_async.WorkspaceOperations + :ivar workspace_feature: WorkspaceFeatureOperations operations + :vartype workspace_feature: azure_machine_learning_workspaces.aio.operations_async.WorkspaceFeatureOperations + :ivar notebook: NotebookOperations operations + :vartype notebook: azure_machine_learning_workspaces.aio.operations_async.NotebookOperations + :ivar usage: UsageOperations operations + :vartype usage: azure_machine_learning_workspaces.aio.operations_async.UsageOperations + :ivar virtual_machine_size: VirtualMachineSizeOperations operations + :vartype virtual_machine_size: azure_machine_learning_workspaces.aio.operations_async.VirtualMachineSizeOperations + :ivar quota: QuotaOperations operations + :vartype quota: azure_machine_learning_workspaces.aio.operations_async.QuotaOperations + :ivar workspace_connection: WorkspaceConnectionOperations operations + :vartype workspace_connection: azure_machine_learning_workspaces.aio.operations_async.WorkspaceConnectionOperations + :ivar machine_learning_compute: MachineLearningComputeOperations operations + :vartype machine_learning_compute: azure_machine_learning_workspaces.aio.operations_async.MachineLearningComputeOperations + :ivar private_endpoint_connection: PrivateEndpointConnectionOperations operations + :vartype private_endpoint_connection: azure_machine_learning_workspaces.aio.operations_async.PrivateEndpointConnectionOperations + :ivar private_link_resource: PrivateLinkResourceOperations operations + :vartype private_link_resource: azure_machine_learning_workspaces.aio.operations_async.PrivateLinkResourceOperations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: Azure subscription identifier. + :type subscription_id: str + :param str base_url: Service URL + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + **kwargs: Any + ) -> None: + if not base_url: + base_url = 'https://management.azure.com' + self._config = AzureMachineLearningWorkspacesConfiguration(credential, subscription_id, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._deserialize = Deserializer(client_models) + + self.operation = OperationOperations( + self._client, self._config, self._serialize, self._deserialize) + self.workspace = WorkspaceOperations( + self._client, self._config, self._serialize, self._deserialize) + self.workspace_feature = WorkspaceFeatureOperations( + self._client, self._config, self._serialize, self._deserialize) + self.notebook = NotebookOperations( + self._client, self._config, self._serialize, self._deserialize) + self.usage = UsageOperations( + self._client, self._config, self._serialize, self._deserialize) + self.virtual_machine_size = VirtualMachineSizeOperations( + self._client, self._config, self._serialize, self._deserialize) + self.quota = QuotaOperations( + self._client, self._config, self._serialize, self._deserialize) + self.workspace_connection = WorkspaceConnectionOperations( + self._client, self._config, self._serialize, self._deserialize) + self.machine_learning_compute = MachineLearningComputeOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_endpoint_connection = PrivateEndpointConnectionOperations( + self._client, self._config, self._serialize, self._deserialize) + self.private_link_resource = PrivateLinkResourceOperations( + self._client, self._config, self._serialize, self._deserialize) + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "AzureMachineLearningWorkspaces": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration_async.py new file mode 100644 index 00000000000..bd563241d52 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/_configuration_async.py @@ -0,0 +1,67 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +VERSION = "unknown" + +class AzureMachineLearningWorkspacesConfiguration(Configuration): + """Configuration for AzureMachineLearningWorkspaces. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: Azure subscription identifier. + :type subscription_id: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + **kwargs: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.api_version = "2020-06-01" + self.credential_scopes = ['https://management.azure.com/.default'] + self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + kwargs.setdefault('sdk_moniker', 'azuremachinelearningworkspaces/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/__init__.py new file mode 100644 index 00000000000..66a3715c10c --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/__init__.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._operation_operations_async import OperationOperations +from ._workspace_operations_async import WorkspaceOperations +from ._workspace_feature_operations_async import WorkspaceFeatureOperations +from ._notebook_operations_async import NotebookOperations +from ._usage_operations_async import UsageOperations +from ._virtual_machine_size_operations_async import VirtualMachineSizeOperations +from ._quota_operations_async import QuotaOperations +from ._workspace_connection_operations_async import WorkspaceConnectionOperations +from ._machine_learning_compute_operations_async import MachineLearningComputeOperations +from ._azure_machine_learning_workspaces_operations_async import AzureMachineLearningWorkspacesOperationsMixin +from ._private_endpoint_connection_operations_async import PrivateEndpointConnectionOperations +from ._private_link_resource_operations_async import PrivateLinkResourceOperations + +__all__ = [ + 'OperationOperations', + 'WorkspaceOperations', + 'WorkspaceFeatureOperations', + 'NotebookOperations', + 'UsageOperations', + 'VirtualMachineSizeOperations', + 'QuotaOperations', + 'WorkspaceConnectionOperations', + 'MachineLearningComputeOperations', + 'AzureMachineLearningWorkspacesOperationsMixin', + 'PrivateEndpointConnectionOperations', + 'PrivateLinkResourceOperations', +] diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_azure_machine_learning_workspaces_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_azure_machine_learning_workspaces_operations_async.py new file mode 100644 index 00000000000..f066fdbd964 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_azure_machine_learning_workspaces_operations_async.py @@ -0,0 +1,86 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class AzureMachineLearningWorkspacesOperationsMixin: + + def list_sku( + self, + **kwargs + ) -> AsyncIterable["models.SkuListResult"]: + """Lists all skus with associated features. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SkuListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.SkuListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SkuListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_sku.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('SkuListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_sku.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_machine_learning_compute_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_machine_learning_compute_operations_async.py new file mode 100644 index 00000000000..b07d807ecf4 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_machine_learning_compute_operations_async.py @@ -0,0 +1,876 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
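+#
+# Usage note (editorial, not part of the generated code): a minimal sketch of the
+# calling pattern for this operation group, assuming the async client exposes it
+# as ``client.machine_learning_compute`` (attribute name assumed). Paged results
+# are iterated with ``async for``; ``begin_*`` methods return an AsyncLROPoller
+# whose ``result()`` is awaited:
+#
+#   async for compute in client.machine_learning_compute.list_by_workspace(
+#           "<resource-group>", "<workspace-name>"):
+#       print(compute)
+#
+#   poller = await client.machine_learning_compute.begin_delete(
+#       "<resource-group>", "<workspace-name>", "<compute-name>",
+#       underlying_resource_action="Detach")
+#   await poller.result()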
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class MachineLearningComputeOperations: + """MachineLearningComputeOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_workspace( + self, + resource_group_name: str, + workspace_name: str, + skiptoken: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.PaginatedComputeResourcesList"]: + """Gets computes in specified workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skiptoken: Continuation token for pagination. 
+ :type skiptoken: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedComputeResourcesList"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if skiptoken is not None: + query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs + ) -> "models.ComputeResource": + """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are + not returned - use 'keys' nested resource to get them. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeResource, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.ComputeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["models.Sku"] = None, + type: Optional[Union[str, "models.ResourceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, "models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None, + properties: Optional["models.Compute"] = None, + **kwargs + ) -> "models.ComputeResource": + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.ComputeResource(location=location, tags=tags, sku=sku, type_identity_type=type, user_assigned_identities=user_assigned_identities, properties=properties) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': 
self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'ComputeResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if response.status_code == 201: + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["models.Sku"] = None, + type: Optional[Union[str, "models.ResourceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, "models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None, + properties: Optional["models.Compute"] = None, + **kwargs + ) -> AsyncLROPoller["models.ComputeResource"]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :param type: The identity type. + :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType + :param user_assigned_identities: The list of user identities associated with resource. 
The user + identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + :type user_assigned_identities: dict[str, ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + :param properties: Compute properties. + :type properties: ~azure_machine_learning_workspaces.models.Compute + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + location=location, + tags=tags, + sku=sku, + type=type, + user_assigned_identities=user_assigned_identities, + properties=properties, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + scale_settings: Optional["models.ScaleSettings"] = None, + **kwargs + ) -> "models.ComputeResource": + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = 
models.ClusterUpdateParameters(scale_settings=scale_settings) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self._update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'ClusterUpdateParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + scale_settings: Optional["models.ScaleSettings"] = None, + **kwargs + ) -> AsyncLROPoller["models.ComputeResource"]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param scale_settings: Desired scale settings for the amlCompute. + :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + scale_settings=scale_settings, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, "models.UnderlyingResourceAction"], + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, "models.UnderlyingResourceAction"], + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes specified Machine Learning compute. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the + underlying compute from workspace if 'Detach'. + :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + underlying_resource_action=underlying_resource_action, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + async def list_node( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs + ) -> "models.AmlComputeNodesInformation": + """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AmlComputeNodesInformation, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.AmlComputeNodesInformation + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.AmlComputeNodesInformation"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.list_node.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_node.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore + + async def list_key( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs + ) -> "models.ComputeSecrets": + """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeSecrets, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeSecrets"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.list_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeSecrets', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore + + async def start( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs + ) -> None: + """Posts a start action to a compute instance. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.start.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore + + async def stop( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs + ) -> None: + """Posts a stop action to a compute instance. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.stop.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore + + async def restart( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + **kwargs + ) -> None: + """Posts a restart action to a compute instance. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.restart.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_notebook_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_notebook_operations_async.py new file mode 100644 index 00000000000..a4173e6cf2c --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_notebook_operations_async.py @@ -0,0 +1,151 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... 
import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class NotebookOperations: + """NotebookOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def _prepare_initial( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> Optional["models.NotebookResourceInfo"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResourceInfo"]] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self._prepare_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore + + async def begin_prepare( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> AsyncLROPoller["models.NotebookResourceInfo"]: + """prepare. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either NotebookResourceInfo or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResourceInfo"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._prepare_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_operation_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_operation_operations_async.py new file mode 100644 index 00000000000..e717a2fcf60 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_operation_operations_async.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
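+#
+# Usage note (editorial, not part of the generated code): ``list`` below pages
+# through the provider's available REST API operations. A minimal sketch, assuming
+# the async client exposes this group as ``client.operation`` (attribute name
+# assumed):
+#
+#   async for op in client.operation.list():
+#       print(op)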
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class OperationOperations: + """OperationOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs + ) -> AsyncIterable["models.OperationListResult"]: + """Lists all of the available Azure Machine Learning Workspaces REST API operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_private_endpoint_connection_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_private_endpoint_connection_operations_async.py new file mode 100644 index 00000000000..83faaf3097d --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_private_endpoint_connection_operations_async.py @@ -0,0 +1,286 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionOperations: + """PrivateEndpointConnectionOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + **kwargs + ) -> "models.PrivateEndpointConnection": + """Gets the specified private endpoint connection associated with the workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the workspace. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def put( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + private_endpoint: Optional["models.PrivateEndpoint"] = None, + private_link_service_connection_state: Optional["models.PrivateLinkServiceConnectionState"] = None, + **kwargs + ) -> "models.PrivateEndpointConnection": + """Update the state of specified private endpoint connection associated with the workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the workspace. + :type private_endpoint_connection_name: str + :param private_endpoint: The resource of private end point. + :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint + :param private_link_service_connection_state: A collection of information about the state of + the connection between service consumer and provider. 
+ :type private_link_service_connection_state: ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _properties = models.PrivateEndpointConnection(private_endpoint=private_endpoint, private_link_service_connection_state=private_link_service_connection_state) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.put.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_properties, 'PrivateEndpointConnection') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes the specified private endpoint connection associated with the workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the workspace. + :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_private_link_resource_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_private_link_resource_operations_async.py new file mode 100644 index 00000000000..fb7aaef8c07 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_private_link_resource_operations_async.py @@ -0,0 +1,96 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourceOperations: + """PrivateLinkResourceOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. 
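+
+    A minimal usage sketch (illustrative only, not emitted by AutoRest). It assumes an
+    already-authenticated ``AzureMachineLearningWorkspaces`` async client named ``client``
+    and assumes the client attaches this operation group as ``private_link_resource``;
+    both of those names are assumptions rather than guarantees of this file::
+
+        # fetch the private link resources that the workspace requires
+        result = await client.private_link_resource.list_by_workspace(
+            resource_group_name="my-resource-group",
+            workspace_name="my-workspace",
+        )
+        # the PrivateLinkResourceListResult is assumed to expose its items on ``value``
+        for resource in result.value or []:
+            print(resource)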
+ + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list_by_workspace( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> "models.PrivateLinkResourceListResult": + """Gets the private link resources that need to be created for a workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateLinkResourceListResult, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourceListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_quota_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_quota_operations_async.py new file mode 100644 index 00000000000..a058522ff72 --- /dev/null +++ 
b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_quota_operations_async.py @@ -0,0 +1,172 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class QuotaOperations: + """QuotaOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def update( + self, + location: str, + value: Optional[List["models.QuotaBaseProperties"]] = None, + **kwargs + ) -> "models.UpdateWorkspaceQuotasResult": + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. + :type location: str + :param value: The list for update quota. 
+ :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UpdateWorkspaceQuotasResult, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.UpdateWorkspaceQuotasResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.QuotaUpdateParameters(value=value) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'QuotaUpdateParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore + + def list( + self, + location: str, + **kwargs + ) -> AsyncIterable["models.ListWorkspaceQuotas"]: + """Gets the currently assigned Workspace Quotas based on VMFamily. + + :param location: The location for which resource usage is queried. 
+ :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceQuotas"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/Quotas'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_usage_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_usage_operations_async.py new file mode 100644 index 00000000000..12e91db31a2 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_usage_operations_async.py @@ -0,0 +1,110 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class UsageOperations: + """UsageOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + location: str, + **kwargs + ) -> AsyncIterable["models.ListUsagesResult"]: + """Gets the current usage information as well as limits for AML resources for given subscription + and location. + + :param location: The location for which resource usage is queried. + :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListUsagesResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListUsagesResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListUsagesResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def 
get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_virtual_machine_size_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_virtual_machine_size_operations_async.py new file mode 100644 index 00000000000..9d76e81286d --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_virtual_machine_size_operations_async.py @@ -0,0 +1,92 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class VirtualMachineSizeOperations: + """VirtualMachineSizeOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + location: str, + **kwargs + ) -> "models.VirtualMachineSizeListResult": + """Returns supported VM Sizes in a location. + + :param location: The location upon which virtual-machine-sizes is queried. 
+ :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: VirtualMachineSizeListResult, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineSizeListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_connection_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_connection_operations_async.py new file mode 100644 index 00000000000..9468f6ad6e6 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_connection_operations_async.py @@ -0,0 +1,323 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... 
import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class WorkspaceConnectionOperations: + """WorkspaceConnectionOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + workspace_name: str, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.PaginatedWorkspaceConnectionsList"]: + """List all connections under a AML workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param target: Target of the workspace connection. + :type target: str + :param category: Category of the workspace connection. + :type category: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedWorkspaceConnectionsList"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if target is not None: + query_parameters['target'] = self._serialize.query("target", target, 'str') + if category is not None: + query_parameters['category'] = self._serialize.query("category", category, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = 
self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore + + async def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + name: Optional[str] = None, + category: Optional[str] = None, + target: Optional[str] = None, + auth_type: Optional[str] = None, + value: Optional[str] = None, + **kwargs + ) -> "models.WorkspaceConnection": + """Add a new workspace connection. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. + :type connection_name: str + :param name: Friendly name of the workspace connection. + :type name: str + :param category: Category of the workspace connection. + :type category: str + :param target: Target of the workspace connection. + :type target: str + :param auth_type: Authorization type of the workspace connection. + :type auth_type: str + :param value: Value details of the workspace connection. 
+ :type value: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnection, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.WorkspaceConnectionDto(name=name, category=category, target=target, auth_type=auth_type, value=value) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'WorkspaceConnectionDto') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('WorkspaceConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + **kwargs + ) -> "models.WorkspaceConnection": + """Get the detail of a workspace connection. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. 
+ :type connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnection, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('WorkspaceConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + **kwargs + ) -> None: + """Delete a workspace connection. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. 
+ :type connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_feature_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_feature_operations_async.py new file mode 100644 index 00000000000..2997b243ac8 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_feature_operations_async.py @@ -0,0 +1,114 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... 
import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class WorkspaceFeatureOperations: + """WorkspaceFeatureOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> AsyncIterable["models.ListAmlUserFeatureResult"]: + """Lists all enabled features for a workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListAmlUserFeatureResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = 
self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_operations_async.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_operations_async.py new file mode 100644 index 00000000000..a10002a9e10 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/aio/operations_async/_workspace_operations_async.py @@ -0,0 +1,746 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class WorkspaceOperations: + """WorkspaceOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def get( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> "models.Workspace": + """Gets the properties of the specified machine learning workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + workspace_name: str, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["models.Sku"] = None, + type: Optional[Union[str, "models.ResourceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, "models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None, + description: Optional[str] = None, + friendly_name: Optional[str] = None, + key_vault: Optional[str] = None, + application_insights: Optional[str] = None, + container_registry: Optional[str] = None, + storage_account: Optional[str] = None, + discovery_url: Optional[str] = None, + hbi_workspace: Optional[bool] = False, + image_build_compute: Optional[str] = None, + allow_public_access_when_behind_vnet: Optional[bool] = False, + shared_private_link_resources: Optional[List["models.SharedPrivateLinkResource"]] = None, + status: Optional[Union[str, "models.EncryptionStatus"]] = None, + key_vault_properties: Optional["models.KeyVaultProperties"] = None, + **kwargs + ) -> Optional["models.Workspace"]: + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.Workspace(location=location, tags=tags, sku=sku, type_identity_type=type, 
user_assigned_identities=user_assigned_identities, description=description, friendly_name=friendly_name, key_vault=key_vault, application_insights=application_insights, container_registry=container_registry, storage_account=storage_account, discovery_url=discovery_url, hbi_workspace=hbi_workspace, image_build_compute=image_build_compute, allow_public_access_when_behind_vnet=allow_public_access_when_behind_vnet, shared_private_link_resources=shared_private_link_resources, status=status, key_vault_properties=key_vault_properties) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'Workspace') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Workspace', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["models.Sku"] = None, + type: Optional[Union[str, "models.ResourceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, "models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None, + description: Optional[str] = None, + friendly_name: Optional[str] = None, + key_vault: Optional[str] = None, + application_insights: Optional[str] = None, + container_registry: Optional[str] = None, + storage_account: Optional[str] = None, + discovery_url: Optional[str] = None, + hbi_workspace: Optional[bool] = False, + image_build_compute: Optional[str] = None, + allow_public_access_when_behind_vnet: Optional[bool] = False, + 
shared_private_link_resources: Optional[List["models.SharedPrivateLinkResource"]] = None,
+        status: Optional[Union[str, "models.EncryptionStatus"]] = None,
+        key_vault_properties: Optional["models.KeyVaultProperties"] = None,
+        **kwargs
+    ) -> AsyncLROPoller["models.Workspace"]:
+        """Creates or updates a workspace with the specified parameters.
+
+        :param resource_group_name: Name of the resource group in which workspace is located.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace.
+        :type workspace_name: str
+        :param location: Specifies the location of the resource.
+        :type location: str
+        :param tags: Contains resource tags defined as key/value pairs.
+        :type tags: dict[str, str]
+        :param sku: The sku of the workspace.
+        :type sku: ~azure_machine_learning_workspaces.models.Sku
+        :param type: The identity type.
+        :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
+        :param user_assigned_identities: The list of user identities associated with the resource. The user
+         identity dictionary key references will be ARM resource ids in the form:
+         '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
+        :type user_assigned_identities: dict[str, ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties]
+        :param description: The description of this workspace.
+        :type description: str
+        :param friendly_name: The friendly name for this workspace. This name is mutable.
+        :type friendly_name: str
+        :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+         changed once the workspace has been created.
+        :type key_vault: str
+        :param application_insights: ARM id of the application insights associated with this workspace.
+         This cannot be changed once the workspace has been created.
+        :type application_insights: str
+        :param container_registry: ARM id of the container registry associated with this workspace.
+         This cannot be changed once the workspace has been created.
+        :type container_registry: str
+        :param storage_account: ARM id of the storage account associated with this workspace. This
+         cannot be changed once the workspace has been created.
+        :type storage_account: str
+        :param discovery_url: Url for the discovery service to identify regional endpoints for machine
+         learning experimentation services.
+        :type discovery_url: str
+        :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data
+         collected by the service.
+        :type hbi_workspace: bool
+        :param image_build_compute: The compute name for image build.
+        :type image_build_compute: str
+        :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public
+         access when behind VNet.
+        :type allow_public_access_when_behind_vnet: bool
+        :param shared_private_link_resources: The list of shared private link resources in this
+         workspace.
+        :type shared_private_link_resources: list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+        :param status: Indicates whether or not the encryption is enabled for the workspace.
+        :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
+        :param key_vault_properties: Customer Key vault properties.
+ :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure_machine_learning_workspaces.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + location=location, + tags=tags, + sku=sku, + type=type, + user_assigned_identities=user_assigned_identities, + description=description, + friendly_name=friendly_name, + key_vault=key_vault, + application_insights=application_insights, + container_registry=container_registry, + storage_account=storage_account, + discovery_url=discovery_url, + hbi_workspace=hbi_workspace, + image_build_compute=image_build_compute, + allow_public_access_when_behind_vnet=allow_public_access_when_behind_vnet, + shared_private_link_resources=shared_private_link_resources, + status=status, + key_vault_properties=key_vault_properties, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + async def _delete_initial( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + async def begin_delete( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> AsyncLROPoller[None]: + """Deletes a machine learning workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + async def update( + self, + resource_group_name: str, + workspace_name: str, + tags: Optional[Dict[str, str]] = None, + sku: Optional["models.Sku"] = None, + description: Optional[str] = None, + friendly_name: Optional[str] = None, + **kwargs + ) -> "models.Workspace": + """Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param tags: The resource tags for the machine learning workspace. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :param description: The description of this workspace. + :type description: str + :param friendly_name: The friendly name for this workspace. 
+ :type friendly_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.WorkspaceUpdateParameters(tags=tags, sku=sku, description=description, friendly_name=friendly_name) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'WorkspaceUpdateParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + skiptoken: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.WorkspaceListResult"]: + """Lists all the available machine learning workspaces under the specified resource group. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param skiptoken: Continuation token for pagination. 
+ :type skiptoken: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if skiptoken is not None: + query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore + + async def list_key( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> "models.ListWorkspaceKeysResult": + """Lists all the keys associated with this workspace. This includes keys for the storage account, + app insights and password for container registry. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListWorkspaceKeysResult, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceKeysResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.list_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore + + async def resync_key( + self, + resource_group_name: str, + workspace_name: str, + **kwargs + ) -> None: + """Resync all the keys associated with this workspace. This includes keys for the storage account, + app insights and password for container registry. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.resync_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + resync_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore + + def list_by_subscription( + self, + skiptoken: Optional[str] = None, + **kwargs + ) -> AsyncIterable["models.WorkspaceListResult"]: + """Lists all the available machine learning workspaces under the specified subscription. + + :param skiptoken: Continuation token for pagination. 
+ :type skiptoken: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_by_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if skiptoken is not None: + query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py new file mode 100644 index 00000000000..d64c9c8d6fe --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/__init__.py @@ -0,0 +1,303 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +try: + from ._models_py3 import Aks + from ._models_py3 import AksComputeSecrets + from ._models_py3 import AksNetworkingConfiguration + from ._models_py3 import AksProperties + from ._models_py3 import AmlCompute + from ._models_py3 import AmlComputeNodeInformation + from ._models_py3 import AmlComputeNodesInformation + from ._models_py3 import AmlComputeProperties + from ._models_py3 import AmlUserFeature + from ._models_py3 import ClusterUpdateParameters + from ._models_py3 import ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties + from ._models_py3 import Compute + from ._models_py3 import ComputeInstance + from ._models_py3 import ComputeInstanceApplication + from ._models_py3 import ComputeInstanceConnectivityEndpoints + from ._models_py3 import ComputeInstanceCreatedBy + from ._models_py3 import ComputeInstanceSshSettings + from ._models_py3 import ComputeNodesInformation + from ._models_py3 import ComputeResource + from ._models_py3 import ComputeSecrets + from ._models_py3 import DataFactory + from ._models_py3 import DataLakeAnalytics + from ._models_py3 import Databricks + from ._models_py3 import DatabricksComputeSecrets + from ._models_py3 import ErrorDetail + from ._models_py3 import ErrorResponse + from ._models_py3 import HdInsight + from ._models_py3 import KeyVaultProperties + from ._models_py3 import ListAmlUserFeatureResult + from ._models_py3 import ListUsagesResult + from ._models_py3 import ListWorkspaceKeysResult + from ._models_py3 import ListWorkspaceQuotas + from ._models_py3 import MachineLearningServiceError + from ._models_py3 import NodeStateCounts + from ._models_py3 import NotebookListCredentialsResult + from ._models_py3 import NotebookPreparationError + from ._models_py3 import NotebookResourceInfo + from ._models_py3 import Operation + from ._models_py3 import OperationDisplay + from ._models_py3 import OperationListResult + from ._models_py3 import PaginatedComputeResourcesList + from ._models_py3 import PaginatedWorkspaceConnectionsList + from ._models_py3 import Password + from ._models_py3 import PrivateEndpoint + from ._models_py3 import PrivateEndpointConnection + from ._models_py3 import PrivateLinkResource + from ._models_py3 import PrivateLinkResourceListResult + from ._models_py3 import PrivateLinkServiceConnectionState + from ._models_py3 import QuotaBaseProperties + from ._models_py3 import QuotaUpdateParameters + from ._models_py3 import RegistryListCredentialsResult + from ._models_py3 import Resource + from ._models_py3 import ResourceId + from ._models_py3 import ResourceName + from ._models_py3 import ResourceQuota + from ._models_py3 import ResourceSkuLocationInfo + from ._models_py3 import ResourceSkuZoneDetails + from ._models_py3 import Restriction + from ._models_py3 import SSLConfiguration + from ._models_py3 import ScaleSettings + from ._models_py3 import ServicePrincipalCredentials + from ._models_py3 import SharedPrivateLinkResource + from ._models_py3 import Sku + from ._models_py3 import SkuCapability + from ._models_py3 import SkuListResult + from ._models_py3 import SkuSettings + from ._models_py3 import SystemService + from ._models_py3 import UpdateWorkspaceQuotas + from ._models_py3 import UpdateWorkspaceQuotasResult + from ._models_py3 import Usage + from ._models_py3 import UsageName + from ._models_py3 import UserAccountCredentials + from ._models_py3 import VirtualMachine + from ._models_py3 import 
VirtualMachineSecrets + from ._models_py3 import VirtualMachineSize + from ._models_py3 import VirtualMachineSizeListResult + from ._models_py3 import VirtualMachineSshCredentials + from ._models_py3 import Workspace + from ._models_py3 import WorkspaceConnection + from ._models_py3 import WorkspaceConnectionDto + from ._models_py3 import WorkspaceListResult + from ._models_py3 import WorkspaceSku + from ._models_py3 import WorkspaceUpdateParameters +except (SyntaxError, ImportError): + from ._models import Aks # type: ignore + from ._models import AksComputeSecrets # type: ignore + from ._models import AksNetworkingConfiguration # type: ignore + from ._models import AksProperties # type: ignore + from ._models import AmlCompute # type: ignore + from ._models import AmlComputeNodeInformation # type: ignore + from ._models import AmlComputeNodesInformation # type: ignore + from ._models import AmlComputeProperties # type: ignore + from ._models import AmlUserFeature # type: ignore + from ._models import ClusterUpdateParameters # type: ignore + from ._models import ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties # type: ignore + from ._models import Compute # type: ignore + from ._models import ComputeInstance # type: ignore + from ._models import ComputeInstanceApplication # type: ignore + from ._models import ComputeInstanceConnectivityEndpoints # type: ignore + from ._models import ComputeInstanceCreatedBy # type: ignore + from ._models import ComputeInstanceSshSettings # type: ignore + from ._models import ComputeNodesInformation # type: ignore + from ._models import ComputeResource # type: ignore + from ._models import ComputeSecrets # type: ignore + from ._models import DataFactory # type: ignore + from ._models import DataLakeAnalytics # type: ignore + from ._models import Databricks # type: ignore + from ._models import DatabricksComputeSecrets # type: ignore + from ._models import ErrorDetail # type: ignore + from ._models import ErrorResponse # type: ignore + from ._models import HdInsight # type: ignore + from ._models import KeyVaultProperties # type: ignore + from ._models import ListAmlUserFeatureResult # type: ignore + from ._models import ListUsagesResult # type: ignore + from ._models import ListWorkspaceKeysResult # type: ignore + from ._models import ListWorkspaceQuotas # type: ignore + from ._models import MachineLearningServiceError # type: ignore + from ._models import NodeStateCounts # type: ignore + from ._models import NotebookListCredentialsResult # type: ignore + from ._models import NotebookPreparationError # type: ignore + from ._models import NotebookResourceInfo # type: ignore + from ._models import Operation # type: ignore + from ._models import OperationDisplay # type: ignore + from ._models import OperationListResult # type: ignore + from ._models import PaginatedComputeResourcesList # type: ignore + from ._models import PaginatedWorkspaceConnectionsList # type: ignore + from ._models import Password # type: ignore + from ._models import PrivateEndpoint # type: ignore + from ._models import PrivateEndpointConnection # type: ignore + from ._models import PrivateLinkResource # type: ignore + from ._models import PrivateLinkResourceListResult # type: ignore + from ._models import PrivateLinkServiceConnectionState # type: ignore + from ._models import QuotaBaseProperties # type: ignore + from ._models import QuotaUpdateParameters # type: ignore + from ._models import RegistryListCredentialsResult # type: ignore + from ._models 
import Resource # type: ignore + from ._models import ResourceId # type: ignore + from ._models import ResourceName # type: ignore + from ._models import ResourceQuota # type: ignore + from ._models import ResourceSkuLocationInfo # type: ignore + from ._models import ResourceSkuZoneDetails # type: ignore + from ._models import Restriction # type: ignore + from ._models import SSLConfiguration # type: ignore + from ._models import ScaleSettings # type: ignore + from ._models import ServicePrincipalCredentials # type: ignore + from ._models import SharedPrivateLinkResource # type: ignore + from ._models import Sku # type: ignore + from ._models import SkuCapability # type: ignore + from ._models import SkuListResult # type: ignore + from ._models import SkuSettings # type: ignore + from ._models import SystemService # type: ignore + from ._models import UpdateWorkspaceQuotas # type: ignore + from ._models import UpdateWorkspaceQuotasResult # type: ignore + from ._models import Usage # type: ignore + from ._models import UsageName # type: ignore + from ._models import UserAccountCredentials # type: ignore + from ._models import VirtualMachine # type: ignore + from ._models import VirtualMachineSecrets # type: ignore + from ._models import VirtualMachineSize # type: ignore + from ._models import VirtualMachineSizeListResult # type: ignore + from ._models import VirtualMachineSshCredentials # type: ignore + from ._models import Workspace # type: ignore + from ._models import WorkspaceConnection # type: ignore + from ._models import WorkspaceConnectionDto # type: ignore + from ._models import WorkspaceListResult # type: ignore + from ._models import WorkspaceSku # type: ignore + from ._models import WorkspaceUpdateParameters # type: ignore + +from ._azure_machine_learning_workspaces_enums import ( + AllocationState, + ApplicationSharingPolicy, + ComputeInstanceState, + ComputeType, + EncryptionStatus, + NodeState, + PrivateEndpointConnectionProvisioningState, + PrivateEndpointServiceConnectionStatus, + ProvisioningState, + QuotaUnit, + ReasonCode, + RemoteLoginPortPublicAccess, + ResourceIdentityType, + SSLConfigurationStatus, + SshPublicAccess, + Status, + UnderlyingResourceAction, + UsageUnit, + VmPriority, +) + +__all__ = [ + 'Aks', + 'AksComputeSecrets', + 'AksNetworkingConfiguration', + 'AksProperties', + 'AmlCompute', + 'AmlComputeNodeInformation', + 'AmlComputeNodesInformation', + 'AmlComputeProperties', + 'AmlUserFeature', + 'ClusterUpdateParameters', + 'ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties', + 'Compute', + 'ComputeInstance', + 'ComputeInstanceApplication', + 'ComputeInstanceConnectivityEndpoints', + 'ComputeInstanceCreatedBy', + 'ComputeInstanceSshSettings', + 'ComputeNodesInformation', + 'ComputeResource', + 'ComputeSecrets', + 'DataFactory', + 'DataLakeAnalytics', + 'Databricks', + 'DatabricksComputeSecrets', + 'ErrorDetail', + 'ErrorResponse', + 'HdInsight', + 'KeyVaultProperties', + 'ListAmlUserFeatureResult', + 'ListUsagesResult', + 'ListWorkspaceKeysResult', + 'ListWorkspaceQuotas', + 'MachineLearningServiceError', + 'NodeStateCounts', + 'NotebookListCredentialsResult', + 'NotebookPreparationError', + 'NotebookResourceInfo', + 'Operation', + 'OperationDisplay', + 'OperationListResult', + 'PaginatedComputeResourcesList', + 'PaginatedWorkspaceConnectionsList', + 'Password', + 'PrivateEndpoint', + 'PrivateEndpointConnection', + 'PrivateLinkResource', + 'PrivateLinkResourceListResult', + 'PrivateLinkServiceConnectionState', + 
'QuotaBaseProperties', + 'QuotaUpdateParameters', + 'RegistryListCredentialsResult', + 'Resource', + 'ResourceId', + 'ResourceName', + 'ResourceQuota', + 'ResourceSkuLocationInfo', + 'ResourceSkuZoneDetails', + 'Restriction', + 'SSLConfiguration', + 'ScaleSettings', + 'ServicePrincipalCredentials', + 'SharedPrivateLinkResource', + 'Sku', + 'SkuCapability', + 'SkuListResult', + 'SkuSettings', + 'SystemService', + 'UpdateWorkspaceQuotas', + 'UpdateWorkspaceQuotasResult', + 'Usage', + 'UsageName', + 'UserAccountCredentials', + 'VirtualMachine', + 'VirtualMachineSecrets', + 'VirtualMachineSize', + 'VirtualMachineSizeListResult', + 'VirtualMachineSshCredentials', + 'Workspace', + 'WorkspaceConnection', + 'WorkspaceConnectionDto', + 'WorkspaceListResult', + 'WorkspaceSku', + 'WorkspaceUpdateParameters', + 'AllocationState', + 'ApplicationSharingPolicy', + 'ComputeInstanceState', + 'ComputeType', + 'EncryptionStatus', + 'NodeState', + 'PrivateEndpointConnectionProvisioningState', + 'PrivateEndpointServiceConnectionStatus', + 'ProvisioningState', + 'QuotaUnit', + 'ReasonCode', + 'RemoteLoginPortPublicAccess', + 'ResourceIdentityType', + 'SSLConfigurationStatus', + 'SshPublicAccess', + 'Status', + 'UnderlyingResourceAction', + 'UsageUnit', + 'VmPriority', +] diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py new file mode 100644 index 00000000000..c69bcf7ab99 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_azure_machine_learning_workspaces_enums.py @@ -0,0 +1,215 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class AllocationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Allocation state of the compute. Possible values are: steady - Indicates that the compute is + not resizing. There are no changes to the number of compute nodes in the compute in progress. A + compute enters this state when it is created and when no operations are being performed on the + compute to change the number of compute nodes. resizing - Indicates that the compute is + resizing; that is, compute nodes are being added to or removed from the compute. 
+ """ + + STEADY = "Steady" + RESIZING = "Resizing" + +class ApplicationSharingPolicy(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Policy for sharing applications on this compute instance among users of parent workspace. If + Personal, only the creator can access applications on this compute instance. When Shared, any + workspace user can access applications on this instance depending on his/her assigned role. + """ + + PERSONAL = "Personal" + SHARED = "Shared" + +class ComputeInstanceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Current state of a ComputeInstance. + """ + + CREATING = "Creating" + CREATE_FAILED = "CreateFailed" + DELETING = "Deleting" + RUNNING = "Running" + RESTARTING = "Restarting" + RESTART_FAILED = "RestartFailed" + JOB_RUNNING = "JobRunning" + SETTING_UP = "SettingUp" + STARTING = "Starting" + START_FAILED = "StartFailed" + STOP_FAILED = "StopFailed" + STOPPED = "Stopped" + STOPPING = "Stopping" + USER_SETTING_UP = "UserSettingUp" + UNKNOWN = "Unknown" + UNUSABLE = "Unusable" + +class ComputeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of compute + """ + + AKS = "AKS" + AML_COMPUTE = "AmlCompute" + COMPUTE_INSTANCE = "ComputeInstance" + DATA_FACTORY = "DataFactory" + VIRTUAL_MACHINE = "VirtualMachine" + HD_INSIGHT = "HDInsight" + DATABRICKS = "Databricks" + DATA_LAKE_ANALYTICS = "DataLakeAnalytics" + +class EncryptionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Indicates whether or not the encryption is enabled for the workspace. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + +class NodeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """State of the compute node. Values are idle, running, preparing, unusable, leaving and + preempted. + """ + + IDLE = "idle" + RUNNING = "running" + PREPARING = "preparing" + UNUSABLE = "unusable" + LEAVING = "leaving" + PREEMPTED = "preempted" + +class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The current provisioning state. + """ + + SUCCEEDED = "Succeeded" + CREATING = "Creating" + DELETING = "Deleting" + FAILED = "Failed" + +class PrivateEndpointServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The private endpoint connection status. + """ + + PENDING = "Pending" + APPROVED = "Approved" + REJECTED = "Rejected" + DISCONNECTED = "Disconnected" + TIMEOUT = "Timeout" + +class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The current deployment state of workspace resource. The provisioningState is to indicate states + for resource provisioning. + """ + + UNKNOWN = "Unknown" + UPDATING = "Updating" + CREATING = "Creating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + +class QuotaUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """An enum describing the unit of quota measurement. + """ + + COUNT = "Count" + +class ReasonCode(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The reason for the restriction. + """ + + NOT_SPECIFIED = "NotSpecified" + NOT_AVAILABLE_FOR_REGION = "NotAvailableForRegion" + NOT_AVAILABLE_FOR_SUBSCRIPTION = "NotAvailableForSubscription" + +class RemoteLoginPortPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh + port is closed on all nodes of the cluster. 
Enabled - Indicates that the public ssh port is
+     open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed
+     on all nodes of the cluster if VNet is defined, else is open on all public nodes. It can be
+     default only during cluster creation time, after creation it will be either enabled or
+     disabled.
+    """
+
+    ENABLED = "Enabled"
+    DISABLED = "Disabled"
+    NOT_SPECIFIED = "NotSpecified"
+
+class ResourceIdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """The identity type.
+    """
+
+    SYSTEM_ASSIGNED = "SystemAssigned"
+    USER_ASSIGNED = "UserAssigned"
+    SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned"
+    NONE = "None"
+
+class SshPublicAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh
+     port is closed on this instance. Enabled - Indicates that the public ssh port is open and
+     accessible according to the VNet/subnet policy if applicable.
+    """
+
+    ENABLED = "Enabled"
+    DISABLED = "Disabled"
+
+class SSLConfigurationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Enable or disable ssl for scoring
+    """
+
+    DISABLED = "Disabled"
+    ENABLED = "Enabled"
+
+class Status(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Status of update workspace quota.
+    """
+
+    UNDEFINED = "Undefined"
+    SUCCESS = "Success"
+    FAILURE = "Failure"
+    INVALID_QUOTA_BELOW_CLUSTER_MINIMUM = "InvalidQuotaBelowClusterMinimum"
+    INVALID_QUOTA_EXCEEDS_SUBSCRIPTION_LIMIT = "InvalidQuotaExceedsSubscriptionLimit"
+    INVALID_VM_FAMILY_NAME = "InvalidVMFamilyName"
+    OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku"
+    OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion"
+
+class UnderlyingResourceAction(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+
+    DELETE = "Delete"
+    DETACH = "Detach"
+
+class UsageUnit(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """An enum describing the unit of usage measurement.
+    """
+
+    COUNT = "Count"
+
+class VmPriority(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
+    """Virtual Machine priority
+    """
+
+    DEDICATED = "Dedicated"
+    LOW_PRIORITY = "LowPriority"
diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
new file mode 100644
index 00000000000..f7ad3b1c343
--- /dev/null
+++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models.py
@@ -0,0 +1,3430 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class Compute(msrest.serialization.Model):
+    """Machine Learning compute object.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, VirtualMachine.
+ + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + } + + _subtype_map = { + 'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'VirtualMachine': 'VirtualMachine'} + } + + def __init__( + self, + **kwargs + ): + super(Compute, self).__init__(**kwargs) + self.compute_type = None # type: Optional[str] + self.compute_location = kwargs.get('compute_location', None) + self.provisioning_state = None + self.description = kwargs.get('description', None) + self.created_on = None + self.modified_on = None + self.resource_id = kwargs.get('resource_id', None) + self.provisioning_errors = None + self.is_attached_compute = None + + +class Aks(Compute): + """A Machine Learning compute based on AKS. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param properties: AKS properties. + :type properties: ~azure_machine_learning_workspaces.models.AksProperties + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'properties': {'key': 'properties', 'type': 'AksProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(Aks, self).__init__(**kwargs) + self.compute_type = 'AKS' # type: str + self.properties = kwargs.get('properties', None) + + +class ComputeSecrets(msrest.serialization.Model): + """Secrets related to a Machine Learning compute. Might differ for every type of compute. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". 
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + """ + + _validation = { + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + } + + _subtype_map = { + 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'} + } + + def __init__( + self, + **kwargs + ): + super(ComputeSecrets, self).__init__(**kwargs) + self.compute_type = None # type: Optional[str] + + +class AksComputeSecrets(ComputeSecrets): + """Secrets related to a Machine Learning compute based on AKS. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param user_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :type user_kube_config: str + :param admin_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :type admin_kube_config: str + :param image_pull_secret_name: Image registry pull secret. + :type image_pull_secret_name: str + """ + + _validation = { + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, + 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, + 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AksComputeSecrets, self).__init__(**kwargs) + self.compute_type = 'AKS' # type: str + self.user_kube_config = kwargs.get('user_kube_config', None) + self.admin_kube_config = kwargs.get('admin_kube_config', None) + self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None) + + +class AksNetworkingConfiguration(msrest.serialization.Model): + """Advance configuration for AKS networking. + + :param subnet_id: Virtual network subnet resource ID the compute nodes belong to. + :type subnet_id: str + :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must + not overlap with any Subnet IP ranges. + :type service_cidr: str + :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within + the Kubernetes service address range specified in serviceCidr. + :type dns_service_ip: str + :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It + must not overlap with any Subnet IP ranges or the Kubernetes service address range. 
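AksComputeSecrets pins its discriminator to "AKS" and maps snake_case attributes to the camelCase keys listed in _attribute_map. A small sketch of what serialization produces, assuming the vendored models package is importable; the kubeconfig text is a placeholder:

# Illustrative example only; not part of the generated patch.
from azext_machinelearningservices.vendored_sdks.machinelearningservices.models import AksComputeSecrets

secrets = AksComputeSecrets(
    user_kube_config="apiVersion: v1\nkind: Config\n# placeholder kubeconfig",
    image_pull_secret_name="acr-pull-secret",
)

# serialize() applies _attribute_map: snake_case attributes become camelCase keys,
# unset optional fields are omitted, and the discriminator is emitted as "AKS".
body = secrets.serialize()
print(body["computeType"])          # AKS
print(body["imagePullSecretName"])  # acr-pull-secret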
+ :type docker_bridge_cidr: str + """ + + _validation = { + 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, + 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'}, + 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, + } + + _attribute_map = { + 'subnet_id': {'key': 'subnetId', 'type': 'str'}, + 'service_cidr': {'key': 'serviceCidr', 'type': 'str'}, + 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'}, + 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AksNetworkingConfiguration, self).__init__(**kwargs) + self.subnet_id = kwargs.get('subnet_id', None) + self.service_cidr = kwargs.get('service_cidr', None) + self.dns_service_ip = kwargs.get('dns_service_ip', None) + self.docker_bridge_cidr = kwargs.get('docker_bridge_cidr', None) + + +class AksProperties(msrest.serialization.Model): + """AKS properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param cluster_fqdn: Cluster full qualified domain name. + :type cluster_fqdn: str + :ivar system_services: System services. + :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService] + :param agent_count: Number of agents. + :type agent_count: int + :param agent_vm_size: Agent virtual machine size. + :type agent_vm_size: str + :param ssl_configuration: SSL configuration. + :type ssl_configuration: ~azure_machine_learning_workspaces.models.SSLConfiguration + :param aks_networking_configuration: AKS networking configuration for vnet. + :type aks_networking_configuration: + ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration + """ + + _validation = { + 'system_services': {'readonly': True}, + 'agent_count': {'minimum': 1}, + } + + _attribute_map = { + 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, + 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, + 'agent_count': {'key': 'agentCount', 'type': 'int'}, + 'agent_vm_size': {'key': 'agentVMSize', 'type': 'str'}, + 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SSLConfiguration'}, + 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, + } + + def __init__( + self, + **kwargs + ): + super(AksProperties, self).__init__(**kwargs) + self.cluster_fqdn = kwargs.get('cluster_fqdn', None) + self.system_services = None + self.agent_count = kwargs.get('agent_count', None) + self.agent_vm_size = kwargs.get('agent_vm_size', None) + self.ssl_configuration = kwargs.get('ssl_configuration', None) + self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None) + + +class AmlCompute(Compute): + """An Azure Machine Learning compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. 
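The _validation tables above give msrest enough to check constraints client-side (the CIDR patterns, and the agent_count minimum of 1). A sketch of surfacing those checks through Model.validate(), under the assumption that the vendored models package is importable:

# Illustrative example only; not part of the generated patch.
from azext_machinelearningservices.vendored_sdks.machinelearningservices.models import (
    AksNetworkingConfiguration,
    AksProperties,
)

props = AksProperties(
    agent_count=0,  # violates the declared minimum of 1
    agent_vm_size="Standard_D3_v2",
    aks_networking_configuration=AksNetworkingConfiguration(
        service_cidr="10.0.0.0/16",
        dns_service_ip="10.0.0.10",
        docker_bridge_cidr="172.17.0.1/16",
    ),
)

# validate() walks the model recursively and returns a list of ValidationError
# objects, so a caller can report problems before calling the service.
for error in props.validate():
    print(error)  # expect a complaint about agent_count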
Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param properties: AML Compute properties. + :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(AmlCompute, self).__init__(**kwargs) + self.compute_type = 'AmlCompute' # type: str + self.properties = kwargs.get('properties', None) + + +class AmlComputeNodeInformation(msrest.serialization.Model): + """Compute node information related to a AmlCompute. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar node_id: ID of the compute node. + :vartype node_id: str + :ivar private_ip_address: Private IP address of the compute node. + :vartype private_ip_address: str + :ivar public_ip_address: Public IP address of the compute node. + :vartype public_ip_address: str + :ivar port: SSH port number of the node. + :vartype port: int + :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable, + leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable", + "leaving", "preempted". + :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState + :ivar run_id: ID of the Experiment running on the node, if any else null. 
+ :vartype run_id: str + """ + + _validation = { + 'node_id': {'readonly': True}, + 'private_ip_address': {'readonly': True}, + 'public_ip_address': {'readonly': True}, + 'port': {'readonly': True}, + 'node_state': {'readonly': True}, + 'run_id': {'readonly': True}, + } + + _attribute_map = { + 'node_id': {'key': 'nodeId', 'type': 'str'}, + 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, + 'port': {'key': 'port', 'type': 'int'}, + 'node_state': {'key': 'nodeState', 'type': 'str'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AmlComputeNodeInformation, self).__init__(**kwargs) + self.node_id = None + self.private_ip_address = None + self.public_ip_address = None + self.port = None + self.node_state = None + self.run_id = None + + +class ComputeNodesInformation(msrest.serialization.Model): + """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AmlComputeNodesInformation. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :ivar next_link: The continuation token. + :vartype next_link: str + """ + + _validation = { + 'compute_type': {'required': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + _subtype_map = { + 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'} + } + + def __init__( + self, + **kwargs + ): + super(ComputeNodesInformation, self).__init__(**kwargs) + self.compute_type = None # type: Optional[str] + self.next_link = None + + +class AmlComputeNodesInformation(ComputeNodesInformation): + """Compute node information related to a AmlCompute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :ivar next_link: The continuation token. + :vartype next_link: str + :ivar nodes: The collection of returned AmlCompute nodes details. 
+ :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation] + """ + + _validation = { + 'compute_type': {'required': True}, + 'next_link': {'readonly': True}, + 'nodes': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, + } + + def __init__( + self, + **kwargs + ): + super(AmlComputeNodesInformation, self).__init__(**kwargs) + self.compute_type = 'AmlCompute' # type: str + self.nodes = None + + +class AmlComputeProperties(msrest.serialization.Model): + """AML Compute properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param vm_size: Virtual Machine Size. + :type vm_size: str + :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated", + "LowPriority". + :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority + :param scale_settings: Scale settings for AML Compute. + :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings + :param user_account_credentials: Credentials for an administrator user account that will be + created on each compute node. + :type user_account_credentials: + ~azure_machine_learning_workspaces.models.UserAccountCredentials + :param subnet: Virtual network subnet resource ID the compute nodes belong to. + :type subnet: ~azure_machine_learning_workspaces.models.ResourceId + :param remote_login_port_public_access: State of the public SSH port. Possible values are: + Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - + Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - + Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, + else is open all public nodes. It can be default only during cluster creation time, after + creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled", + "NotSpecified". Default value: "NotSpecified". + :type remote_login_port_public_access: str or + ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess + :ivar allocation_state: Allocation state of the compute. Possible values are: steady - + Indicates that the compute is not resizing. There are no changes to the number of compute nodes + in the compute in progress. A compute enters this state when it is created and when no + operations are being performed on the compute to change the number of compute nodes. resizing - + Indicates that the compute is resizing; that is, compute nodes are being added to or removed + from the compute. Possible values include: "Steady", "Resizing". + :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState + :ivar allocation_state_transition_time: The time at which the compute entered its current + allocation state. + :vartype allocation_state_transition_time: ~datetime.datetime + :ivar errors: Collection of errors encountered by various compute nodes during node setup. + :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar current_node_count: The number of compute nodes currently assigned to the compute. + :vartype current_node_count: int + :ivar target_node_count: The target number of compute nodes for the compute. 
If the + allocationState is resizing, this property denotes the target node count for the ongoing resize + operation. If the allocationState is steady, this property denotes the target node count for + the previous resize operation. + :vartype target_node_count: int + :ivar node_state_counts: Counts of various node states on the compute. + :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts + """ + + _validation = { + 'allocation_state': {'readonly': True}, + 'allocation_state_transition_time': {'readonly': True}, + 'errors': {'readonly': True}, + 'current_node_count': {'readonly': True}, + 'target_node_count': {'readonly': True}, + 'node_state_counts': {'readonly': True}, + } + + _attribute_map = { + 'vm_size': {'key': 'vmSize', 'type': 'str'}, + 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, + 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, + 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, + 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, + 'allocation_state': {'key': 'allocationState', 'type': 'str'}, + 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, + 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, + 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, + 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, + 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, + } + + def __init__( + self, + **kwargs + ): + super(AmlComputeProperties, self).__init__(**kwargs) + self.vm_size = kwargs.get('vm_size', None) + self.vm_priority = kwargs.get('vm_priority', None) + self.scale_settings = kwargs.get('scale_settings', None) + self.user_account_credentials = kwargs.get('user_account_credentials', None) + self.subnet = kwargs.get('subnet', None) + self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified") + self.allocation_state = None + self.allocation_state_transition_time = None + self.errors = None + self.current_node_count = None + self.target_node_count = None + self.node_state_counts = None + + +class AmlUserFeature(msrest.serialization.Model): + """Features enabled for a workspace. + + :param id: Specifies the feature ID. + :type id: str + :param display_name: Specifies the feature name. + :type display_name: str + :param description: Describes the feature for user experience. + :type description: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AmlUserFeature, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.display_name = kwargs.get('display_name', None) + self.description = kwargs.get('description', None) + + +class ClusterUpdateParameters(msrest.serialization.Model): + """AmlCompute update parameters. + + :param scale_settings: Desired scale settings for the amlCompute. 
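AmlComputeProperties defaults remote_login_port_public_access to "NotSpecified" in its constructor, and the server-populated ivars stay out of request bodies. A short sketch, assuming the vendored models package is importable:

# Illustrative example only; not part of the generated patch.
from azext_machinelearningservices.vendored_sdks.machinelearningservices.models import AmlComputeProperties

props = AmlComputeProperties(vm_size="STANDARD_DS3_V2", vm_priority="LowPriority")

body = props.serialize()
print(body["vmPriority"])                   # LowPriority
print(body["remoteLoginPortPublicAccess"])  # NotSpecified (constructor default)
# Read-only fields such as allocationState or currentNodeCount are not emitted.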
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings + """ + + _attribute_map = { + 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(ClusterUpdateParameters, self).__init__(**kwargs) + self.scale_settings = kwargs.get('scale_settings', None) + + +class ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model): + """ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal id of user assigned identity. + :vartype principal_id: str + :ivar client_id: The client id of user assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class ComputeInstance(Compute): + """An Azure Machine Learning compute instance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param vm_size: Virtual Machine Size. + :type vm_size: str + :param application_sharing_policy: Policy for sharing applications on this compute instance + among users of parent workspace. If Personal, only the creator can access applications on this + compute instance. When Shared, any workspace user can access applications on this instance + depending on his/her assigned role. Possible values include: "Personal", "Shared". Default + value: "Shared". 
+ :type application_sharing_policy: str or + ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy + :param ssh_settings: Specifies policy and settings for SSH access. + :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings + :ivar connectivity_endpoints: Describes all connectivity endpoints available for this + ComputeInstance. + :vartype connectivity_endpoints: + ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints + :ivar applications: Describes available applications and their endpoints on this + ComputeInstance. + :vartype applications: + list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication] + :ivar created_by: Describes information on user who created this ComputeInstance. + :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy + :ivar errors: Collection of errors encountered on this ComputeInstance. + :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar state: The current state of this ComputeInstance. Possible values include: "Creating", + "CreateFailed", "Deleting", "Running", "Restarting", "RestartFailed", "JobRunning", + "SettingUp", "Starting", "StartFailed", "StopFailed", "Stopped", "Stopping", "UserSettingUp", + "Unknown", "Unusable". + :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState + :param id: The ID of the resource. + :type id: str + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'connectivity_endpoints': {'readonly': True}, + 'applications': {'readonly': True}, + 'created_by': {'readonly': True}, + 'errors': {'readonly': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'vm_size': {'key': 'properties.vmSize', 'type': 'str'}, + 'application_sharing_policy': {'key': 'properties.applicationSharingPolicy', 'type': 'str'}, + 'ssh_settings': {'key': 'properties.sshSettings', 'type': 'ComputeInstanceSshSettings'}, + 'connectivity_endpoints': {'key': 'properties.connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, + 'applications': {'key': 'properties.applications', 'type': '[ComputeInstanceApplication]'}, + 'created_by': {'key': 'properties.createdBy', 'type': 'ComputeInstanceCreatedBy'}, + 'errors': {'key': 'properties.errors', 'type': '[MachineLearningServiceError]'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'id': {'key': 'properties.subnet.id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ComputeInstance, self).__init__(**kwargs) + self.compute_type = 'ComputeInstance' # type: str + self.vm_size = kwargs.get('vm_size', None) + self.application_sharing_policy = 
kwargs.get('application_sharing_policy', "Shared") + self.ssh_settings = kwargs.get('ssh_settings', None) + self.connectivity_endpoints = None + self.applications = None + self.created_by = None + self.errors = None + self.state = None + self.id = kwargs.get('id', None) + + +class ComputeInstanceApplication(msrest.serialization.Model): + """Defines an Aml Instance application and its connectivity endpoint URI. + + :param display_name: Name of the ComputeInstance application. + :type display_name: str + :param endpoint_uri: Application' endpoint URI. + :type endpoint_uri: str + """ + + _attribute_map = { + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ComputeInstanceApplication, self).__init__(**kwargs) + self.display_name = kwargs.get('display_name', None) + self.endpoint_uri = kwargs.get('endpoint_uri', None) + + +class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): + """Defines all connectivity endpoints and properties for a ComputeInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar public_ip_address: Public IP Address of this ComputeInstance. + :vartype public_ip_address: str + :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in + which the compute instance is deployed). + :vartype private_ip_address: str + """ + + _validation = { + 'public_ip_address': {'readonly': True}, + 'private_ip_address': {'readonly': True}, + } + + _attribute_map = { + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, + 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs) + self.public_ip_address = None + self.private_ip_address = None + + +class ComputeInstanceCreatedBy(msrest.serialization.Model): + """Describes information on user who created this ComputeInstance. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar user_name: Name of the user. + :vartype user_name: str + :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization. + :vartype user_org_id: str + :ivar user_id: Uniquely identifies the user within his/her organization. + :vartype user_id: str + """ + + _validation = { + 'user_name': {'readonly': True}, + 'user_org_id': {'readonly': True}, + 'user_id': {'readonly': True}, + } + + _attribute_map = { + 'user_name': {'key': 'userName', 'type': 'str'}, + 'user_org_id': {'key': 'userOrgId', 'type': 'str'}, + 'user_id': {'key': 'userId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ComputeInstanceCreatedBy, self).__init__(**kwargs) + self.user_name = None + self.user_org_id = None + self.user_id = None + + +class ComputeInstanceSshSettings(msrest.serialization.Model): + """Specifies policy and settings for SSH access. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param ssh_public_access: State of the public SSH port. Possible values are: Disabled - + Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the + public ssh port is open and accessible according to the VNet/subnet policy if applicable. + Possible values include: "Enabled", "Disabled". Default value: "Disabled". 
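ComputeInstance flattens most of its payload under 'properties.*', and its id attribute is mapped two levels deep to 'properties.subnet.id'. A sketch of the nested JSON the serializer builds from those dotted keys, with a placeholder subnet ID, assuming the vendored models package is importable:

# Illustrative example only; not part of the generated patch.
from azext_machinelearningservices.vendored_sdks.machinelearningservices.models import ComputeInstance

instance = ComputeInstance(
    vm_size="STANDARD_DS3_V2",
    id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>",
)

body = instance.serialize()
# Dotted keys in _attribute_map are split into nested objects on the wire.
print(body["computeType"])                             # ComputeInstance
print(body["properties"]["applicationSharingPolicy"])  # Shared (constructor default)
print(body["properties"]["subnet"]["id"])              # the subnet resource ID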
+ :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess + :ivar admin_user_name: Describes the admin user name. + :vartype admin_user_name: str + :ivar ssh_port: Describes the port for connecting through SSH. + :vartype ssh_port: int + :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t + rsa -b 2048" to generate your SSH key pairs. + :type admin_public_key: str + """ + + _validation = { + 'admin_user_name': {'readonly': True}, + 'ssh_port': {'readonly': True}, + } + + _attribute_map = { + 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, + 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, + 'ssh_port': {'key': 'sshPort', 'type': 'int'}, + 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ComputeInstanceSshSettings, self).__init__(**kwargs) + self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled") + self.admin_user_name = None + self.ssh_port = None + self.admin_public_key = kwargs.get('admin_public_key', None) + + +class Resource(msrest.serialization.Model): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar name: Specifies the name of the resource. + :vartype name: str + :param location: Specifies the location of the resource. + :type location: str + :ivar type: Specifies the type of the resource. + :vartype type: str + :param tags: A set of tags. Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type_identity_type: The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned", "None". + :type type_identity_type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType + :param user_assigned_identities: The list of user identities associated with resource. The user + identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. 
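ComputeInstanceSshSettings defaults ssh_public_access to "Disabled", while admin_user_name and ssh_port are response-only. A brief sketch with a placeholder public key, assuming the vendored models package is importable:

# Illustrative example only; not part of the generated patch.
from azext_machinelearningservices.vendored_sdks.machinelearningservices.models import ComputeInstanceSshSettings

ssh = ComputeInstanceSshSettings(admin_public_key="ssh-rsa AAAA... user@example")

print(ssh.ssh_public_access)  # Disabled (constructor default)
print(ssh.serialize())        # only sshPublicAccess and adminPublicKey; read-only fields are skipped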
+ :type user_assigned_identities: dict[str, + ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, + 'type_identity_type': {'key': 'identity.type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'identity.userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.location = kwargs.get('location', None) + self.type = None + self.tags = kwargs.get('tags', None) + self.sku = kwargs.get('sku', None) + self.principal_id = None + self.tenant_id = None + self.type_identity_type = kwargs.get('type_identity_type', None) + self.user_assigned_identities = kwargs.get('user_assigned_identities', None) + + +class ComputeResource(Resource): + """Machine Learning compute object wrapped into ARM resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar name: Specifies the name of the resource. + :vartype name: str + :param location: Specifies the location of the resource. + :type location: str + :ivar type: Specifies the type of the resource. + :vartype type: str + :param tags: A set of tags. Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type_identity_type: The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned", "None". + :type type_identity_type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType + :param user_assigned_identities: The list of user identities associated with resource. The user + identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + :type user_assigned_identities: dict[str, + ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + :param properties: Compute properties. 
+ :type properties: ~azure_machine_learning_workspaces.models.Compute + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, + 'type_identity_type': {'key': 'identity.type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'identity.userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, + 'properties': {'key': 'properties', 'type': 'Compute'}, + } + + def __init__( + self, + **kwargs + ): + super(ComputeResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class Databricks(Compute): + """A Databricks compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param databricks_access_token: Databricks access token.
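ComputeResource is the ARM envelope around the polymorphic Compute payload, so attaching an existing Databricks workspace means nesting a Databricks object inside it. A hedged sketch with placeholder IDs and token, assuming the vendored models package is importable:

# Illustrative example only; not part of the generated patch.
from azext_machinelearningservices.vendored_sdks.machinelearningservices.models import (
    ComputeResource,
    Databricks,
)

envelope = ComputeResource(
    location="eastus",
    properties=Databricks(
        resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Databricks/workspaces/<ws>",
        databricks_access_token="<databricks-pat>",
    ),
)

body = envelope.serialize()
# The envelope's "properties" holds the Compute payload; the access token is then
# flattened one level further by the 'properties.databricksAccessToken' mapping.
print(body["properties"]["computeType"])                          # Databricks
print(body["properties"]["properties"]["databricksAccessToken"])  # <databricks-pat>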
+ :type databricks_access_token: str + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'databricks_access_token': {'key': 'properties.databricksAccessToken', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Databricks, self).__init__(**kwargs) + self.compute_type = 'Databricks' # type: str + self.databricks_access_token = kwargs.get('databricks_access_token', None) + + +class DatabricksComputeSecrets(ComputeSecrets): + """Secrets related to a Machine Learning compute based on Databricks. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param databricks_access_token: access token for databricks account. + :type databricks_access_token: str + """ + + _validation = { + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatabricksComputeSecrets, self).__init__(**kwargs) + self.compute_type = 'Databricks' # type: str + self.databricks_access_token = kwargs.get('databricks_access_token', None) + + +class DataFactory(Compute): + """A DataFactory compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. 
+ :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFactory, self).__init__(**kwargs) + self.compute_type = 'DataFactory' # type: str + + +class DataLakeAnalytics(Compute): + """A DataLakeAnalytics compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param data_lake_store_account_name: DataLake Store Account Name. 
+ :type data_lake_store_account_name: str + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'data_lake_store_account_name': {'key': 'properties.dataLakeStoreAccountName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DataLakeAnalytics, self).__init__(**kwargs) + self.compute_type = 'DataLakeAnalytics' # type: str + self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None) + + +class ErrorDetail(msrest.serialization.Model): + """Error detail information. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. Error code. + :type code: str + :param message: Required. Error message. + :type message: str + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorDetail, self).__init__(**kwargs) + self.code = kwargs['code'] + self.message = kwargs['message'] + + +class ErrorResponse(msrest.serialization.Model): + """Error response information. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: Error code. + :vartype code: str + :ivar message: Error message. + :vartype message: str + :ivar details: An array of error detail objects. + :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'details': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = None + self.message = None + self.details = None + + +class HdInsight(Compute): + """A HDInsight compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. 
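ErrorDetail is one of the few models here with truly required constructor arguments: its __init__ indexes kwargs directly, so a missing field fails at construction time rather than at serialization time. A small sketch, assuming the vendored models package is importable:

# Illustrative example only; not part of the generated patch.
from azext_machinelearningservices.vendored_sdks.machinelearningservices.models import ErrorDetail

detail = ErrorDetail(code="QuotaExceeded", message="Requested 24 cores; the limit is 20.")
print(detail.serialize())  # {'code': 'QuotaExceeded', 'message': 'Requested 24 cores; the limit is 20.'}

try:
    ErrorDetail(code="QuotaExceeded")  # message omitted
except KeyError as missing:
    print(f"missing required field: {missing}")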
Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param ssh_port: Port open for ssh connections on the master node of the cluster. + :type ssh_port: int + :param address: Public IP address of the master node of the cluster. + :type address: str + :param administrator_account: Admin credentials for master node of the cluster. + :type administrator_account: + ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'ssh_port': {'key': 'properties.sshPort', 'type': 'int'}, + 'address': {'key': 'properties.address', 'type': 'str'}, + 'administrator_account': {'key': 'properties.administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + } + + def __init__( + self, + **kwargs + ): + super(HdInsight, self).__init__(**kwargs) + self.compute_type = 'HDInsight' # type: str + self.ssh_port = kwargs.get('ssh_port', None) + self.address = kwargs.get('address', None) + self.administrator_account = kwargs.get('administrator_account', None) + + +class KeyVaultProperties(msrest.serialization.Model): + """KeyVaultProperties. + + All required parameters must be populated in order to send to Azure. + + :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned + encryption key is present. + :type key_vault_arm_id: str + :param key_identifier: Required. Key vault uri to access the encryption key. + :type key_identifier: str + :param identity_client_id: For future use - The client id of the identity which will be used to + access key vault. 
+ :type identity_client_id: str + """ + + _validation = { + 'key_vault_arm_id': {'required': True}, + 'key_identifier': {'required': True}, + } + + _attribute_map = { + 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, + 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, + 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(KeyVaultProperties, self).__init__(**kwargs) + self.key_vault_arm_id = kwargs['key_vault_arm_id'] + self.key_identifier = kwargs['key_identifier'] + self.identity_client_id = kwargs.get('identity_client_id', None) + + +class ListAmlUserFeatureResult(msrest.serialization.Model): + """The List Aml user feature operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of AML user facing features. + :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature] + :ivar next_link: The URI to fetch the next page of AML user features information. Call + ListNext() with this to fetch the next page of AML user features information. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[AmlUserFeature]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListAmlUserFeatureResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListUsagesResult(msrest.serialization.Model): + """The List Usages operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of AML resource usages. + :vartype value: list[~azure_machine_learning_workspaces.models.Usage] + :ivar next_link: The URI to fetch the next page of AML resource usage information. Call + ListNext() with this to fetch the next page of AML resource usage information. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Usage]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListUsagesResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListWorkspaceKeysResult(msrest.serialization.Model): + """ListWorkspaceKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. 
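KeyVaultProperties carries the customer-managed key settings and requires both key_vault_arm_id and key_identifier. A short sketch with placeholder vault and key URIs, assuming the vendored models package is importable:

# Illustrative example only; not part of the generated patch.
from azext_machinelearningservices.vendored_sdks.machinelearningservices.models import KeyVaultProperties

cmk = KeyVaultProperties(
    key_vault_arm_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<vault>",
    key_identifier="https://<vault>.vault.azure.net/keys/<key>/<version>",
)

print(cmk.serialize())  # emits the two camelCase keys: keyVaultArmId and keyIdentifier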
+ + :ivar user_storage_key: + :vartype user_storage_key: str + :ivar user_storage_resource_id: + :vartype user_storage_resource_id: str + :ivar app_insights_instrumentation_key: + :vartype app_insights_instrumentation_key: str + :ivar container_registry_credentials: + :vartype container_registry_credentials: + ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult + :param notebook_access_keys: + :type notebook_access_keys: + ~azure_machine_learning_workspaces.models.NotebookListCredentialsResult + """ + + _validation = { + 'user_storage_key': {'readonly': True}, + 'user_storage_resource_id': {'readonly': True}, + 'app_insights_instrumentation_key': {'readonly': True}, + 'container_registry_credentials': {'readonly': True}, + } + + _attribute_map = { + 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'}, + 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, + 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, + 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'NotebookListCredentialsResult'}, + } + + def __init__( + self, + **kwargs + ): + super(ListWorkspaceKeysResult, self).__init__(**kwargs) + self.user_storage_key = None + self.user_storage_resource_id = None + self.app_insights_instrumentation_key = None + self.container_registry_credentials = None + self.notebook_access_keys = kwargs.get('notebook_access_keys', None) + + +class ListWorkspaceQuotas(msrest.serialization.Model): + """The List WorkspaceQuotasByVMFamily operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of Workspace Quotas by VM Family. + :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota] + :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. + Call ListNext() with this to fetch the next page of Workspace Quota information. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ResourceQuota]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListWorkspaceQuotas, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class MachineLearningServiceError(msrest.serialization.Model): + """Wrapper for error response to follow ARM guidelines. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar error: The error response. + :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse + """ + + _validation = { + 'error': {'readonly': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__( + self, + **kwargs + ): + super(MachineLearningServiceError, self).__init__(**kwargs) + self.error = None + + +class NodeStateCounts(msrest.serialization.Model): + """Counts of various compute node states on the amlCompute. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar idle_node_count: Number of compute nodes in idle state. + :vartype idle_node_count: int + :ivar running_node_count: Number of compute nodes which are running jobs. 
+ :vartype running_node_count: int + :ivar preparing_node_count: Number of compute nodes which are being prepared. + :vartype preparing_node_count: int + :ivar unusable_node_count: Number of compute nodes which are in unusable state. + :vartype unusable_node_count: int + :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. + :vartype leaving_node_count: int + :ivar preempted_node_count: Number of compute nodes which are in preempted state. + :vartype preempted_node_count: int + """ + + _validation = { + 'idle_node_count': {'readonly': True}, + 'running_node_count': {'readonly': True}, + 'preparing_node_count': {'readonly': True}, + 'unusable_node_count': {'readonly': True}, + 'leaving_node_count': {'readonly': True}, + 'preempted_node_count': {'readonly': True}, + } + + _attribute_map = { + 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, + 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, + 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, + 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, + 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, + 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(NodeStateCounts, self).__init__(**kwargs) + self.idle_node_count = None + self.running_node_count = None + self.preparing_node_count = None + self.unusable_node_count = None + self.leaving_node_count = None + self.preempted_node_count = None + + +class NotebookListCredentialsResult(msrest.serialization.Model): + """NotebookListCredentialsResult. + + :param primary_access_key: + :type primary_access_key: str + :param secondary_access_key: + :type secondary_access_key: str + """ + + _attribute_map = { + 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, + 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(NotebookListCredentialsResult, self).__init__(**kwargs) + self.primary_access_key = kwargs.get('primary_access_key', None) + self.secondary_access_key = kwargs.get('secondary_access_key', None) + + +class NotebookPreparationError(msrest.serialization.Model): + """NotebookPreparationError. + + :param error_message: + :type error_message: str + :param status_code: + :type status_code: int + """ + + _attribute_map = { + 'error_message': {'key': 'errorMessage', 'type': 'str'}, + 'status_code': {'key': 'statusCode', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(NotebookPreparationError, self).__init__(**kwargs) + self.error_message = kwargs.get('error_message', None) + self.status_code = kwargs.get('status_code', None) + + +class NotebookResourceInfo(msrest.serialization.Model): + """NotebookResourceInfo. + + :param fqdn: + :type fqdn: str + :param resource_id: the data plane resourceId that used to initialize notebook component. + :type resource_id: str + :param notebook_preparation_error: The error that occurs when preparing notebook. 
+ :type notebook_preparation_error: + ~azure_machine_learning_workspaces.models.NotebookPreparationError + """ + + _attribute_map = { + 'fqdn': {'key': 'fqdn', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, + } + + def __init__( + self, + **kwargs + ): + super(NotebookResourceInfo, self).__init__(**kwargs) + self.fqdn = kwargs.get('fqdn', None) + self.resource_id = kwargs.get('resource_id', None) + self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None) + + +class Operation(msrest.serialization.Model): + """Azure Machine Learning workspace REST API operation. + + :param name: Operation name: {provider}/{resource}/{operation}. + :type name: str + :param display: Display name of operation. + :type display: ~azure_machine_learning_workspaces.models.OperationDisplay + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__( + self, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.display = kwargs.get('display', None) + + +class OperationDisplay(msrest.serialization.Model): + """Display name of operation. + + :param provider: The resource provider name: Microsoft.MachineLearningExperimentation. + :type provider: str + :param resource: The resource on which the operation is performed. + :type resource: str + :param operation: The operation that users can perform. + :type operation: str + :param description: The description for the operation. + :type description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = kwargs.get('provider', None) + self.resource = kwargs.get('resource', None) + self.operation = kwargs.get('operation', None) + self.description = kwargs.get('description', None) + + +class OperationListResult(msrest.serialization.Model): + """An array of operations supported by the resource provider. + + :param value: List of AML workspace operations supported by the AML workspace resource + provider. + :type value: list[~azure_machine_learning_workspaces.models.Operation] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + } + + def __init__( + self, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class PaginatedComputeResourcesList(msrest.serialization.Model): + """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope. + + :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope. + :type value: list[~azure_machine_learning_workspaces.models.ComputeResource] + :param next_link: A continuation link (absolute URI) to the next page of results in the list. 
+ :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ComputeResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PaginatedComputeResourcesList, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class PaginatedWorkspaceConnectionsList(msrest.serialization.Model): + """Paginated list of Workspace connection objects. + + :param value: An array of Workspace connection objects. + :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection] + :param next_link: A continuation link (absolute URI) to the next page of results in the list. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[WorkspaceConnection]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class Password(msrest.serialization.Model): + """Password. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: + :vartype name: str + :ivar value: + :vartype value: str + """ + + _validation = { + 'name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Password, self).__init__(**kwargs) + self.name = None + self.value = None + + +class PrivateEndpoint(msrest.serialization.Model): + """The Private Endpoint resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The ARM identifier for Private Endpoint. + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None + + +class PrivateEndpointConnection(msrest.serialization.Model): + """The Private Endpoint Connection resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: ResourceId of the private endpoint connection. + :vartype id: str + :ivar name: Friendly name of the private endpoint connection. + :vartype name: str + :ivar type: Resource type of private endpoint connection. + :vartype type: str + :param private_endpoint: The resource of private end point. + :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint + :param private_link_service_connection_state: A collection of information about the state of + the connection between service consumer and provider. + :type private_link_service_connection_state: + ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: The provisioning state of the private endpoint connection resource. + Possible values include: "Succeeded", "Creating", "Deleting", "Failed". 
+ :vartype provisioning_state: str or + ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.private_endpoint = kwargs.get('private_endpoint', None) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + self.provisioning_state = None + + +class PrivateLinkResource(Resource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar name: Specifies the name of the resource. + :vartype name: str + :param location: Specifies the location of the resource. + :type location: str + :ivar type: Specifies the type of the resource. + :vartype type: str + :param tags: A set of tags. Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type_identity_type: The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned", "None". + :type type_identity_type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType + :param user_assigned_identities: The list of user identities associated with resource. The user + identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + :type user_assigned_identities: dict[str, + ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :param required_zone_names: The private link resource Private link DNS zone name. 
+ :type required_zone_names: list[str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, + 'type_identity_type': {'key': 'identity.type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'identity.userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResource, self).__init__(**kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = kwargs.get('required_zone_names', None) + + +class PrivateLinkResourceListResult(msrest.serialization.Model): + """A list of private link resources. + + :param value: Array of private link resources. + :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkResourceListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """A collection of information about the state of the connection between service consumer and provider. + + :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", + "Timeout". + :type status: str or + ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus + :param description: The reason for approval/rejection of the connection. + :type description: str + :param actions_required: A message indicating if changes on the service provider require any + updates on the consumer. + :type actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.description = kwargs.get('description', None) + self.actions_required = kwargs.get('actions_required', None) + + +class QuotaBaseProperties(msrest.serialization.Model): + """The properties for Quota update or retrieval. + + :param id: Specifies the resource ID. + :type id: str + :param type: Specifies the resource type. + :type type: str + :param limit: The maximum permitted quota of the resource. + :type limit: long + :param unit: An enum describing the unit of quota measurement. Possible values include: + "Count". 
+ :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(QuotaBaseProperties, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.type = kwargs.get('type', None) + self.limit = kwargs.get('limit', None) + self.unit = kwargs.get('unit', None) + + +class QuotaUpdateParameters(msrest.serialization.Model): + """Quota update parameters. + + :param value: The list for update quota. + :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, + } + + def __init__( + self, + **kwargs + ): + super(QuotaUpdateParameters, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class RegistryListCredentialsResult(msrest.serialization.Model): + """RegistryListCredentialsResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar location: + :vartype location: str + :ivar username: + :vartype username: str + :param passwords: + :type passwords: list[~azure_machine_learning_workspaces.models.Password] + """ + + _validation = { + 'location': {'readonly': True}, + 'username': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'str'}, + 'passwords': {'key': 'passwords', 'type': '[Password]'}, + } + + def __init__( + self, + **kwargs + ): + super(RegistryListCredentialsResult, self).__init__(**kwargs) + self.location = None + self.username = None + self.passwords = kwargs.get('passwords', None) + + +class ResourceId(msrest.serialization.Model): + """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. The ID of the resource. + :type id: str + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceId, self).__init__(**kwargs) + self.id = kwargs['id'] + + +class ResourceName(msrest.serialization.Model): + """The Resource Name. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The name of the resource. + :vartype value: str + :ivar localized_value: The localized name of the resource. + :vartype localized_value: str + """ + + _validation = { + 'value': {'readonly': True}, + 'localized_value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceName, self).__init__(**kwargs) + self.value = None + self.localized_value = None + + +class ResourceQuota(msrest.serialization.Model): + """The quota assigned to a resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar name: Name of the resource. + :vartype name: ~azure_machine_learning_workspaces.models.ResourceName + :ivar limit: The maximum permitted quota of the resource. 
+ :vartype limit: long + :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". + :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'name': {'readonly': True}, + 'limit': {'readonly': True}, + 'unit': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'ResourceName'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceQuota, self).__init__(**kwargs) + self.id = None + self.type = None + self.name = None + self.limit = None + self.unit = None + + +class ResourceSkuLocationInfo(msrest.serialization.Model): + """ResourceSkuLocationInfo. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar location: Location of the SKU. + :vartype location: str + :ivar zones: List of availability zones where the SKU is supported. + :vartype zones: list[str] + :ivar zone_details: Details of capabilities available to a SKU in specific zones. + :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails] + """ + + _validation = { + 'location': {'readonly': True}, + 'zones': {'readonly': True}, + 'zone_details': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'zones': {'key': 'zones', 'type': '[str]'}, + 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceSkuLocationInfo, self).__init__(**kwargs) + self.location = None + self.zones = None + self.zone_details = None + + +class ResourceSkuZoneDetails(msrest.serialization.Model): + """Describes The zonal capabilities of a SKU. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The set of zones that the SKU is available in with the specified capabilities. + :vartype name: list[str] + :ivar capabilities: A list of capabilities that are available for the SKU in the specified list + of zones. + :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability] + """ + + _validation = { + 'name': {'readonly': True}, + 'capabilities': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': '[str]'}, + 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceSkuZoneDetails, self).__init__(**kwargs) + self.name = None + self.capabilities = None + + +class Restriction(msrest.serialization.Model): + """The restriction because of which SKU cannot be used. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The type of restrictions. As of now only possible value for this is location. + :vartype type: str + :ivar values: The value of restrictions. If the restriction type is set to location. This would + be different locations where the SKU is restricted. + :vartype values: list[str] + :param reason_code: The reason for the restriction. Possible values include: "NotSpecified", + "NotAvailableForRegion", "NotAvailableForSubscription". 
+ :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode + """ + + _validation = { + 'type': {'readonly': True}, + 'values': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + 'reason_code': {'key': 'reasonCode', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Restriction, self).__init__(**kwargs) + self.type = None + self.values = None + self.reason_code = kwargs.get('reason_code', None) + + +class ScaleSettings(msrest.serialization.Model): + """scale settings for AML Compute. + + All required parameters must be populated in order to send to Azure. + + :param max_node_count: Required. Max number of nodes to use. + :type max_node_count: int + :param min_node_count: Min number of nodes to use. + :type min_node_count: int + :param node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. + :type node_idle_time_before_scale_down: ~datetime.timedelta + """ + + _validation = { + 'max_node_count': {'required': True}, + } + + _attribute_map = { + 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, + 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, + } + + def __init__( + self, + **kwargs + ): + super(ScaleSettings, self).__init__(**kwargs) + self.max_node_count = kwargs['max_node_count'] + self.min_node_count = kwargs.get('min_node_count', 0) + self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None) + + +class ServicePrincipalCredentials(msrest.serialization.Model): + """Service principal credentials. + + All required parameters must be populated in order to send to Azure. + + :param client_id: Required. Client Id. + :type client_id: str + :param client_secret: Required. Client secret. + :type client_secret: str + """ + + _validation = { + 'client_id': {'required': True}, + 'client_secret': {'required': True}, + } + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ServicePrincipalCredentials, self).__init__(**kwargs) + self.client_id = kwargs['client_id'] + self.client_secret = kwargs['client_secret'] + + +class SharedPrivateLinkResource(msrest.serialization.Model): + """SharedPrivateLinkResource. + + :param name: Unique name of the private link. + :type name: str + :param private_link_resource_id: The resource id that private link links to. + :type private_link_resource_id: str + :param group_id: The private link resource group id. + :type group_id: str + :param request_message: Request message. + :type request_message: str + :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", + "Timeout". 
+ :type status: str or + ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SharedPrivateLinkResource, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.private_link_resource_id = kwargs.get('private_link_resource_id', None) + self.group_id = kwargs.get('group_id', None) + self.request_message = kwargs.get('request_message', None) + self.status = kwargs.get('status', None) + + +class Sku(msrest.serialization.Model): + """Sku of the resource. + + :param name: Name of the sku. + :type name: str + :param tier: Tier of the sku like Basic or Enterprise. + :type tier: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.tier = kwargs.get('tier', None) + + +class SkuCapability(msrest.serialization.Model): + """Features/user capabilities associated with the sku. + + :param name: Capability/Feature ID. + :type name: str + :param value: Details about the feature/capability. + :type value: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SkuCapability, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) + + +class SkuListResult(msrest.serialization.Model): + """List of skus with features. + + :param value: + :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku] + :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this + URI to fetch the next page of Workspace Skus. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[WorkspaceSku]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SkuListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class SkuSettings(msrest.serialization.Model): + """Describes Workspace Sku details and features. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar locations: The set of locations that the SKU is available. This will be supported and + registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). + :vartype locations: list[str] + :ivar location_info: A list of locations and availability zones in those locations where the + SKU is available. + :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo] + :ivar tier: Sku Tier like Basic or Enterprise. + :vartype tier: str + :ivar resource_type: + :vartype resource_type: str + :ivar name: + :vartype name: str + :ivar capabilities: List of features/user capabilities associated with the sku. + :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability] + :param restrictions: The restrictions because of which SKU cannot be used. 
This is empty if + there are no restrictions. + :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction] + """ + + _validation = { + 'locations': {'readonly': True}, + 'location_info': {'readonly': True}, + 'tier': {'readonly': True}, + 'resource_type': {'readonly': True}, + 'name': {'readonly': True}, + 'capabilities': {'readonly': True}, + } + + _attribute_map = { + 'locations': {'key': 'locations', 'type': '[str]'}, + 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'resource_type': {'key': 'resourceType', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'}, + 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'}, + } + + def __init__( + self, + **kwargs + ): + super(SkuSettings, self).__init__(**kwargs) + self.locations = None + self.location_info = None + self.tier = None + self.resource_type = None + self.name = None + self.capabilities = None + self.restrictions = kwargs.get('restrictions', None) + + +class SSLConfiguration(msrest.serialization.Model): + """The ssl configuration for scoring. + + :param status: Enable or disable ssl for scoring. Possible values include: "Disabled", + "Enabled". + :type status: str or ~azure_machine_learning_workspaces.models.SSLConfigurationStatus + :param cert: Cert data. + :type cert: str + :param key: Key data. + :type key: str + :param cname: CNAME of the cert. + :type cname: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'cert': {'key': 'cert', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + 'cname': {'key': 'cname', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SSLConfiguration, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.cert = kwargs.get('cert', None) + self.key = kwargs.get('key', None) + self.cname = kwargs.get('cname', None) + + +class SystemService(msrest.serialization.Model): + """A system service running on a compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar system_service_type: The type of this system service. + :vartype system_service_type: str + :ivar public_ip_address: Public IP address. + :vartype public_ip_address: str + :ivar version: The version for this type. + :vartype version: str + """ + + _validation = { + 'system_service_type': {'readonly': True}, + 'public_ip_address': {'readonly': True}, + 'version': {'readonly': True}, + } + + _attribute_map = { + 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemService, self).__init__(**kwargs) + self.system_service_type = None + self.public_ip_address = None + self.version = None + + +class UpdateWorkspaceQuotas(msrest.serialization.Model): + """The properties for update Quota response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. + :vartype type: str + :param limit: The maximum permitted quota of the resource. + :type limit: long + :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". 
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit + :param status: Status of update workspace quota. Possible values include: "Undefined", + "Success", "Failure", "InvalidQuotaBelowClusterMinimum", + "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku", + "OperationNotEnabledForRegion". + :type status: str or ~azure_machine_learning_workspaces.models.Status + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'unit': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UpdateWorkspaceQuotas, self).__init__(**kwargs) + self.id = None + self.type = None + self.limit = kwargs.get('limit', None) + self.unit = None + self.status = kwargs.get('status', None) + + +class UpdateWorkspaceQuotasResult(msrest.serialization.Model): + """The result of update workspace quota. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of workspace quota update result. + :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas] + :ivar next_link: The URI to fetch the next page of workspace quota update result. Call + ListNext() with this to fetch the next page of Workspace Quota update result. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class Usage(msrest.serialization.Model): + """Describes AML Resource Usage. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count". + :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit + :ivar current_value: The current usage of the resource. + :vartype current_value: long + :ivar limit: The maximum permitted usage of the resource. + :vartype limit: long + :ivar name: The name of the type of usage. + :vartype name: ~azure_machine_learning_workspaces.models.UsageName + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'unit': {'readonly': True}, + 'current_value': {'readonly': True}, + 'limit': {'readonly': True}, + 'name': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'current_value': {'key': 'currentValue', 'type': 'long'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'UsageName'}, + } + + def __init__( + self, + **kwargs + ): + super(Usage, self).__init__(**kwargs) + self.id = None + self.type = None + self.unit = None + self.current_value = None + self.limit = None + self.name = None + + +class UsageName(msrest.serialization.Model): + """The Usage Names. 
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The name of the resource.
+    :vartype value: str
+    :ivar localized_value: The localized name of the resource.
+    :vartype localized_value: str
+    """
+
+    _validation = {
+        'value': {'readonly': True},
+        'localized_value': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': 'str'},
+        'localized_value': {'key': 'localizedValue', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(UsageName, self).__init__(**kwargs)
+        self.value = None
+        self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+    """Settings for user account that gets created on each of the nodes of a compute.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param admin_user_name: Required. Name of the administrator user account which can be used to
+     SSH to nodes.
+    :type admin_user_name: str
+    :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+    :type admin_user_ssh_public_key: str
+    :param admin_user_password: Password of the administrator user account.
+    :type admin_user_password: str
+    """
+
+    _validation = {
+        'admin_user_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+        'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+        'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(UserAccountCredentials, self).__init__(**kwargs)
+        self.admin_user_name = kwargs['admin_user_name']
+        self.admin_user_ssh_public_key = kwargs.get('admin_user_ssh_public_key', None)
+        self.admin_user_password = kwargs.get('admin_user_password', None)
+
+
+class VirtualMachine(Compute):
+    """A Machine Learning compute based on Azure Virtual Machines.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param compute_location: Location for the underlying compute.
+    :type compute_location: str
+    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+    :param description: The description of the Machine Learning compute.
+    :type description: str
+    :ivar created_on: The date and time when the compute was created.
+    :vartype created_on: ~datetime.datetime
+    :ivar modified_on: The date and time when the compute was last modified.
+    :vartype modified_on: ~datetime.datetime
+    :param resource_id: ARM resource id of the underlying compute.
+    :type resource_id: str
+    :ivar provisioning_errors: Errors during provisioning.
+    :vartype provisioning_errors:
+     list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+    :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+     from outside if true, or machine learning service provisioned it if false.
+    :vartype is_attached_compute: bool
+    :param virtual_machine_size: Virtual Machine size.
+    :type virtual_machine_size: str
+    :param ssh_port: Port open for ssh connections.
+    :type ssh_port: int
+    :param address: Public IP address of the virtual machine.
+    :type address: str
+    :param administrator_account: Admin credentials for virtual machine.
+    :type administrator_account:
+     ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials
+    """
+
+    _validation = {
+        'compute_type': {'required': True},
+        'provisioning_state': {'readonly': True},
+        'created_on': {'readonly': True},
+        'modified_on': {'readonly': True},
+        'provisioning_errors': {'readonly': True},
+        'is_attached_compute': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'compute_location': {'key': 'computeLocation', 'type': 'str'},
+        'provisioning_state': {'key': 'provisioningState', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
+        'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'},
+        'resource_id': {'key': 'resourceId', 'type': 'str'},
+        'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'},
+        'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'},
+        'virtual_machine_size': {'key': 'properties.virtualMachineSize', 'type': 'str'},
+        'ssh_port': {'key': 'properties.sshPort', 'type': 'int'},
+        'address': {'key': 'properties.address', 'type': 'str'},
+        'administrator_account': {'key': 'properties.administratorAccount', 'type': 'VirtualMachineSshCredentials'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(VirtualMachine, self).__init__(**kwargs)
+        self.compute_type = 'VirtualMachine'  # type: str
+        self.virtual_machine_size = kwargs.get('virtual_machine_size', None)
+        self.ssh_port = kwargs.get('ssh_port', None)
+        self.address = kwargs.get('address', None)
+        self.administrator_account = kwargs.get('administrator_account', None)
+
+
+class VirtualMachineSecrets(ComputeSecrets):
+    """Secrets related to a Machine Learning compute based on Azure Virtual Machines.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param administrator_account: Admin credentials for virtual machine.
+ :type administrator_account: + ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials + """ + + _validation = { + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + } + + def __init__( + self, + **kwargs + ): + super(VirtualMachineSecrets, self).__init__(**kwargs) + self.compute_type = 'VirtualMachine' # type: str + self.administrator_account = kwargs.get('administrator_account', None) + + +class VirtualMachineSize(msrest.serialization.Model): + """Describes the properties of a VM size. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The name of the virtual machine size. + :vartype name: str + :ivar family: The family name of the virtual machine size. + :vartype family: str + :ivar v_cp_us: The number of vCPUs supported by the virtual machine size. + :vartype v_cp_us: int + :ivar gpus: The number of gPUs supported by the virtual machine size. + :vartype gpus: int + :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size. + :vartype os_vhd_size_mb: int + :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine + size. + :vartype max_resource_volume_mb: int + :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size. + :vartype memory_gb: float + :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs. + :vartype low_priority_capable: bool + :ivar premium_io: Specifies if the virtual machine size supports premium IO. + :vartype premium_io: bool + """ + + _validation = { + 'name': {'readonly': True}, + 'family': {'readonly': True}, + 'v_cp_us': {'readonly': True}, + 'gpus': {'readonly': True}, + 'os_vhd_size_mb': {'readonly': True}, + 'max_resource_volume_mb': {'readonly': True}, + 'memory_gb': {'readonly': True}, + 'low_priority_capable': {'readonly': True}, + 'premium_io': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'family': {'key': 'family', 'type': 'str'}, + 'v_cp_us': {'key': 'vCPUs', 'type': 'int'}, + 'gpus': {'key': 'gpus', 'type': 'int'}, + 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'}, + 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'}, + 'memory_gb': {'key': 'memoryGB', 'type': 'float'}, + 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'}, + 'premium_io': {'key': 'premiumIO', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(VirtualMachineSize, self).__init__(**kwargs) + self.name = None + self.family = None + self.v_cp_us = None + self.gpus = None + self.os_vhd_size_mb = None + self.max_resource_volume_mb = None + self.memory_gb = None + self.low_priority_capable = None + self.premium_io = None + + +class VirtualMachineSizeListResult(msrest.serialization.Model): + """The List Virtual Machine size operation response. + + :param aml_compute: The list of virtual machine sizes supported by AmlCompute. 
+    :type aml_compute: list[~azure_machine_learning_workspaces.models.VirtualMachineSize]
+    """
+
+    _attribute_map = {
+        'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(VirtualMachineSizeListResult, self).__init__(**kwargs)
+        self.aml_compute = kwargs.get('aml_compute', None)
+
+
+class VirtualMachineSshCredentials(msrest.serialization.Model):
+    """Admin credentials for virtual machine.
+
+    :param username: Username of admin account.
+    :type username: str
+    :param password: Password of admin account.
+    :type password: str
+    :param public_key_data: Public key data.
+    :type public_key_data: str
+    :param private_key_data: Private key data.
+    :type private_key_data: str
+    """
+
+    _attribute_map = {
+        'username': {'key': 'username', 'type': 'str'},
+        'password': {'key': 'password', 'type': 'str'},
+        'public_key_data': {'key': 'publicKeyData', 'type': 'str'},
+        'private_key_data': {'key': 'privateKeyData', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(VirtualMachineSshCredentials, self).__init__(**kwargs)
+        self.username = kwargs.get('username', None)
+        self.password = kwargs.get('password', None)
+        self.public_key_data = kwargs.get('public_key_data', None)
+        self.private_key_data = kwargs.get('private_key_data', None)
+
+
+class Workspace(Resource):
+    """An object that represents a machine learning workspace.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar id: Specifies the resource ID.
+    :vartype id: str
+    :ivar name: Specifies the name of the resource.
+    :vartype name: str
+    :param location: Specifies the location of the resource.
+    :type location: str
+    :ivar type: Specifies the type of the resource.
+    :vartype type: str
+    :param tags: A set of tags. Contains resource tags defined as key/value pairs.
+    :type tags: dict[str, str]
+    :param sku: The sku of the workspace.
+    :type sku: ~azure_machine_learning_workspaces.models.Sku
+    :ivar principal_id: The principal ID of resource identity.
+    :vartype principal_id: str
+    :ivar tenant_id: The tenant ID of resource.
+    :vartype tenant_id: str
+    :param type_identity_type: The identity type. Possible values include: "SystemAssigned",
+     "UserAssigned", "SystemAssigned,UserAssigned", "None".
+    :type type_identity_type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType
+    :param user_assigned_identities: The list of user identities associated with resource. The user
+     identity dictionary key references will be ARM resource ids in the form:
+     '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
+    :type user_assigned_identities: dict[str,
+     ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties]
+    :ivar workspace_id: The immutable id associated with this workspace.
+    :vartype workspace_id: str
+    :param description: The description of this workspace.
+    :type description: str
+    :param friendly_name: The friendly name for this workspace. This name is mutable.
+    :type friendly_name: str
+    :ivar creation_time: The creation time of the machine learning workspace in ISO8601 format.
+    :vartype creation_time: ~datetime.datetime
+    :param key_vault: ARM id of the key vault associated with this workspace. This cannot be
+     changed once the workspace has been created.
+ :type key_vault: str + :param application_insights: ARM id of the application insights associated with this workspace. + This cannot be changed once the workspace has been created. + :type application_insights: str + :param container_registry: ARM id of the container registry associated with this workspace. + This cannot be changed once the workspace has been created. + :type container_registry: str + :param storage_account: ARM id of the storage account associated with this workspace. This + cannot be changed once the workspace has been created. + :type storage_account: str + :param discovery_url: Url for the discovery service to identify regional endpoints for machine + learning experimentation services. + :type discovery_url: str + :ivar provisioning_state: The current deployment state of workspace resource. The + provisioningState is to indicate states for resource provisioning. Possible values include: + "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data + collected by the service. + :type hbi_workspace: bool + :ivar service_provisioned_resource_group: The name of the managed resource group created by + workspace RP in customer subscription if the workspace is CMK workspace. + :vartype service_provisioned_resource_group: str + :ivar private_link_count: Count of private connections in the workspace. + :vartype private_link_count: int + :param image_build_compute: The compute name for image build. + :type image_build_compute: str + :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public + access when behind VNet. + :type allow_public_access_when_behind_vnet: bool + :ivar private_endpoint_connections: The list of private endpoint connections in the workspace. + :vartype private_endpoint_connections: + list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection] + :param shared_private_link_resources: The list of shared private link resources in this + workspace. + :type shared_private_link_resources: + list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource] + :ivar notebook_info: The notebook info of Azure ML workspace. + :vartype notebook_info: ~azure_machine_learning_workspaces.models.NotebookResourceInfo + :param status: Indicates whether or not the encryption is enabled for the workspace. Possible + values include: "Enabled", "Disabled". + :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus + :param key_vault_properties: Customer Key vault properties. 
+ :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'workspace_id': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'service_provisioned_resource_group': {'readonly': True}, + 'private_link_count': {'readonly': True}, + 'private_endpoint_connections': {'readonly': True}, + 'notebook_info': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, + 'type_identity_type': {'key': 'identity.type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'identity.userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, + 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, + 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, + 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, + 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'}, + 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'}, + 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'}, + 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, + 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, + 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'}, + 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, + 'status': {'key': 'properties.encryption.status', 'type': 'str'}, + 'key_vault_properties': {'key': 'properties.encryption.keyVaultProperties', 'type': 'KeyVaultProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(Workspace, self).__init__(**kwargs) + self.workspace_id = None + self.description = kwargs.get('description', None) + self.friendly_name = kwargs.get('friendly_name', None) + self.creation_time = None + self.key_vault = kwargs.get('key_vault', None) + self.application_insights = kwargs.get('application_insights', None) + self.container_registry = kwargs.get('container_registry', None) + self.storage_account = kwargs.get('storage_account', None) + self.discovery_url = kwargs.get('discovery_url', None) + self.provisioning_state = None + self.hbi_workspace = 
kwargs.get('hbi_workspace', False) + self.service_provisioned_resource_group = None + self.private_link_count = None + self.image_build_compute = kwargs.get('image_build_compute', None) + self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', False) + self.private_endpoint_connections = None + self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None) + self.notebook_info = None + self.status = kwargs.get('status', None) + self.key_vault_properties = kwargs.get('key_vault_properties', None) + + +class WorkspaceConnection(msrest.serialization.Model): + """Workspace connection. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: ResourceId of the workspace connection. + :vartype id: str + :ivar name: Friendly name of the workspace connection. + :vartype name: str + :ivar type: Resource type of workspace connection. + :vartype type: str + :param category: Category of the workspace connection. + :type category: str + :param target: Target of the workspace connection. + :type target: str + :param auth_type: Authorization type of the workspace connection. + :type auth_type: str + :param value: Value details of the workspace connection. + :type value: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'category': {'key': 'properties.category', 'type': 'str'}, + 'target': {'key': 'properties.target', 'type': 'str'}, + 'auth_type': {'key': 'properties.authType', 'type': 'str'}, + 'value': {'key': 'properties.value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspaceConnection, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.category = kwargs.get('category', None) + self.target = kwargs.get('target', None) + self.auth_type = kwargs.get('auth_type', None) + self.value = kwargs.get('value', None) + + +class WorkspaceConnectionDto(msrest.serialization.Model): + """object used for creating workspace connection. + + :param name: Friendly name of the workspace connection. + :type name: str + :param category: Category of the workspace connection. + :type category: str + :param target: Target of the workspace connection. + :type target: str + :param auth_type: Authorization type of the workspace connection. + :type auth_type: str + :param value: Value details of the workspace connection. + :type value: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'category': {'key': 'properties.category', 'type': 'str'}, + 'target': {'key': 'properties.target', 'type': 'str'}, + 'auth_type': {'key': 'properties.authType', 'type': 'str'}, + 'value': {'key': 'properties.value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspaceConnectionDto, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.category = kwargs.get('category', None) + self.target = kwargs.get('target', None) + self.auth_type = kwargs.get('auth_type', None) + self.value = kwargs.get('value', None) + + +class WorkspaceListResult(msrest.serialization.Model): + """The result of a request to list machine learning workspaces. + + :param value: The list of machine learning workspaces. 
Since this list may be incomplete, the + nextLink field should be used to request the next list of machine learning workspaces. + :type value: list[~azure_machine_learning_workspaces.models.Workspace] + :param next_link: The URI that can be used to request the next list of machine learning + workspaces. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Workspace]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspaceListResult, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class WorkspaceSku(msrest.serialization.Model): + """AML workspace sku information. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar resource_type: + :vartype resource_type: str + :ivar skus: The list of workspace sku settings. + :vartype skus: list[~azure_machine_learning_workspaces.models.SkuSettings] + """ + + _validation = { + 'resource_type': {'readonly': True}, + 'skus': {'readonly': True}, + } + + _attribute_map = { + 'resource_type': {'key': 'resourceType', 'type': 'str'}, + 'skus': {'key': 'skus', 'type': '[SkuSettings]'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspaceSku, self).__init__(**kwargs) + self.resource_type = None + self.skus = None + + +class WorkspaceUpdateParameters(msrest.serialization.Model): + """The parameters for updating a machine learning workspace. + + :param tags: A set of tags. The resource tags for the machine learning workspace. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :param description: The description of this workspace. + :type description: str + :param friendly_name: The friendly name for this workspace. + :type friendly_name: str + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspaceUpdateParameters, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.sku = kwargs.get('sku', None) + self.description = kwargs.get('description', None) + self.friendly_name = kwargs.get('friendly_name', None) diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py new file mode 100644 index 00000000000..fd8d26e9473 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/models/_models_py3.py @@ -0,0 +1,3682 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +import datetime +from typing import Dict, List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._azure_machine_learning_workspaces_enums import * + + +class Compute(msrest.serialization.Model): + """Machine Learning compute object. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: Aks, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HdInsight, VirtualMachine. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. 
+ :vartype is_attached_compute: bool + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + } + + _subtype_map = { + 'compute_type': {'AKS': 'Aks', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HdInsight', 'VirtualMachine': 'VirtualMachine'} + } + + def __init__( + self, + *, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + **kwargs + ): + super(Compute, self).__init__(**kwargs) + self.compute_type = None # type: Optional[str] + self.compute_location = compute_location + self.provisioning_state = None + self.description = description + self.created_on = None + self.modified_on = None + self.resource_id = resource_id + self.provisioning_errors = None + self.is_attached_compute = None + + +class Aks(Compute): + """A Machine Learning compute based on AKS. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param properties: AKS properties. 
+ :type properties: ~azure_machine_learning_workspaces.models.AksProperties + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'properties': {'key': 'properties', 'type': 'AksProperties'}, + } + + def __init__( + self, + *, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + properties: Optional["AksProperties"] = None, + **kwargs + ): + super(Aks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) + self.compute_type = 'AKS' # type: str + self.properties = properties + + +class ComputeSecrets(msrest.serialization.Model): + """Secrets related to a Machine Learning compute. Might differ for every type of compute. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + """ + + _validation = { + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + } + + _subtype_map = { + 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'} + } + + def __init__( + self, + **kwargs + ): + super(ComputeSecrets, self).__init__(**kwargs) + self.compute_type = None # type: Optional[str] + + +class AksComputeSecrets(ComputeSecrets): + """Secrets related to a Machine Learning compute based on AKS. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param user_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :type user_kube_config: str + :param admin_kube_config: Content of kubeconfig file that can be used to connect to the + Kubernetes cluster. + :type admin_kube_config: str + :param image_pull_secret_name: Image registry pull secret. 
+ :type image_pull_secret_name: str
+ """
+
+ _validation = {
+ 'compute_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'compute_type': {'key': 'computeType', 'type': 'str'},
+ 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'},
+ 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'},
+ 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ user_kube_config: Optional[str] = None,
+ admin_kube_config: Optional[str] = None,
+ image_pull_secret_name: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksComputeSecrets, self).__init__(**kwargs)
+ self.compute_type = 'AKS' # type: str
+ self.user_kube_config = user_kube_config
+ self.admin_kube_config = admin_kube_config
+ self.image_pull_secret_name = image_pull_secret_name
+
+
+class AksNetworkingConfiguration(msrest.serialization.Model):
+ """Advanced configuration for AKS networking.
+
+ :param subnet_id: Virtual network subnet resource ID the compute nodes belong to.
+ :type subnet_id: str
+ :param service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must
+ not overlap with any Subnet IP ranges.
+ :type service_cidr: str
+ :param dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within
+ the Kubernetes service address range specified in serviceCidr.
+ :type dns_service_ip: str
+ :param docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It
+ must not overlap with any Subnet IP ranges or the Kubernetes service address range.
+ :type docker_bridge_cidr: str
+ """
+
+ _validation = {
+ 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'},
+ 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'},
+ }
+
+ _attribute_map = {
+ 'subnet_id': {'key': 'subnetId', 'type': 'str'},
+ 'service_cidr': {'key': 'serviceCidr', 'type': 'str'},
+ 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'},
+ 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ subnet_id: Optional[str] = None,
+ service_cidr: Optional[str] = None,
+ dns_service_ip: Optional[str] = None,
+ docker_bridge_cidr: Optional[str] = None,
+ **kwargs
+ ):
+ super(AksNetworkingConfiguration, self).__init__(**kwargs)
+ self.subnet_id = subnet_id
+ self.service_cidr = service_cidr
+ self.dns_service_ip = dns_service_ip
+ self.docker_bridge_cidr = docker_bridge_cidr
+
+
+class AksProperties(msrest.serialization.Model):
+ """AKS properties.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :param cluster_fqdn: Cluster fully qualified domain name.
+ :type cluster_fqdn: str
+ :ivar system_services: System services.
+ :vartype system_services: list[~azure_machine_learning_workspaces.models.SystemService]
+ :param agent_count: Number of agents.
+ :type agent_count: int
+ :param agent_vm_size: Agent virtual machine size.
+ :type agent_vm_size: str
+ :param ssl_configuration: SSL configuration.
+ :type ssl_configuration: ~azure_machine_learning_workspaces.models.SSLConfiguration
+ :param aks_networking_configuration: AKS networking configuration for vnet.
+ :type aks_networking_configuration: + ~azure_machine_learning_workspaces.models.AksNetworkingConfiguration + """ + + _validation = { + 'system_services': {'readonly': True}, + 'agent_count': {'minimum': 1}, + } + + _attribute_map = { + 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, + 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, + 'agent_count': {'key': 'agentCount', 'type': 'int'}, + 'agent_vm_size': {'key': 'agentVMSize', 'type': 'str'}, + 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SSLConfiguration'}, + 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, + } + + def __init__( + self, + *, + cluster_fqdn: Optional[str] = None, + agent_count: Optional[int] = None, + agent_vm_size: Optional[str] = None, + ssl_configuration: Optional["SSLConfiguration"] = None, + aks_networking_configuration: Optional["AksNetworkingConfiguration"] = None, + **kwargs + ): + super(AksProperties, self).__init__(**kwargs) + self.cluster_fqdn = cluster_fqdn + self.system_services = None + self.agent_count = agent_count + self.agent_vm_size = agent_vm_size + self.ssl_configuration = ssl_configuration + self.aks_networking_configuration = aks_networking_configuration + + +class AmlCompute(Compute): + """An Azure Machine Learning compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param properties: AML Compute properties. 
+ :type properties: ~azure_machine_learning_workspaces.models.AmlComputeProperties + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, + } + + def __init__( + self, + *, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + properties: Optional["AmlComputeProperties"] = None, + **kwargs + ): + super(AmlCompute, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) + self.compute_type = 'AmlCompute' # type: str + self.properties = properties + + +class AmlComputeNodeInformation(msrest.serialization.Model): + """Compute node information related to a AmlCompute. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar node_id: ID of the compute node. + :vartype node_id: str + :ivar private_ip_address: Private IP address of the compute node. + :vartype private_ip_address: str + :ivar public_ip_address: Public IP address of the compute node. + :vartype public_ip_address: str + :ivar port: SSH port number of the node. + :vartype port: int + :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable, + leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable", + "leaving", "preempted". + :vartype node_state: str or ~azure_machine_learning_workspaces.models.NodeState + :ivar run_id: ID of the Experiment running on the node, if any else null. + :vartype run_id: str + """ + + _validation = { + 'node_id': {'readonly': True}, + 'private_ip_address': {'readonly': True}, + 'public_ip_address': {'readonly': True}, + 'port': {'readonly': True}, + 'node_state': {'readonly': True}, + 'run_id': {'readonly': True}, + } + + _attribute_map = { + 'node_id': {'key': 'nodeId', 'type': 'str'}, + 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, + 'port': {'key': 'port', 'type': 'int'}, + 'node_state': {'key': 'nodeState', 'type': 'str'}, + 'run_id': {'key': 'runId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(AmlComputeNodeInformation, self).__init__(**kwargs) + self.node_id = None + self.private_ip_address = None + self.public_ip_address = None + self.port = None + self.node_state = None + self.run_id = None + + +class ComputeNodesInformation(msrest.serialization.Model): + """Compute nodes information related to a Machine Learning compute. Might differ for every type of compute. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: AmlComputeNodesInformation. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :ivar next_link: The continuation token. + :vartype next_link: str + """ + + _validation = { + 'compute_type': {'required': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + _subtype_map = { + 'compute_type': {'AmlCompute': 'AmlComputeNodesInformation'} + } + + def __init__( + self, + **kwargs + ): + super(ComputeNodesInformation, self).__init__(**kwargs) + self.compute_type = None # type: Optional[str] + self.next_link = None + + +class AmlComputeNodesInformation(ComputeNodesInformation): + """Compute node information related to a AmlCompute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :ivar next_link: The continuation token. + :vartype next_link: str + :ivar nodes: The collection of returned AmlCompute nodes details. + :vartype nodes: list[~azure_machine_learning_workspaces.models.AmlComputeNodeInformation] + """ + + _validation = { + 'compute_type': {'required': True}, + 'next_link': {'readonly': True}, + 'nodes': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, + } + + def __init__( + self, + **kwargs + ): + super(AmlComputeNodesInformation, self).__init__(**kwargs) + self.compute_type = 'AmlCompute' # type: str + self.nodes = None + + +class AmlComputeProperties(msrest.serialization.Model): + """AML Compute properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param vm_size: Virtual Machine Size. + :type vm_size: str + :param vm_priority: Virtual Machine priority. Possible values include: "Dedicated", + "LowPriority". + :type vm_priority: str or ~azure_machine_learning_workspaces.models.VmPriority + :param scale_settings: Scale settings for AML Compute. + :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings + :param user_account_credentials: Credentials for an administrator user account that will be + created on each compute node. + :type user_account_credentials: + ~azure_machine_learning_workspaces.models.UserAccountCredentials + :param subnet: Virtual network subnet resource ID the compute nodes belong to. + :type subnet: ~azure_machine_learning_workspaces.models.ResourceId + :param remote_login_port_public_access: State of the public SSH port. 
Possible values are: + Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - + Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - + Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, + else is open all public nodes. It can be default only during cluster creation time, after + creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled", + "NotSpecified". Default value: "NotSpecified". + :type remote_login_port_public_access: str or + ~azure_machine_learning_workspaces.models.RemoteLoginPortPublicAccess + :ivar allocation_state: Allocation state of the compute. Possible values are: steady - + Indicates that the compute is not resizing. There are no changes to the number of compute nodes + in the compute in progress. A compute enters this state when it is created and when no + operations are being performed on the compute to change the number of compute nodes. resizing - + Indicates that the compute is resizing; that is, compute nodes are being added to or removed + from the compute. Possible values include: "Steady", "Resizing". + :vartype allocation_state: str or ~azure_machine_learning_workspaces.models.AllocationState + :ivar allocation_state_transition_time: The time at which the compute entered its current + allocation state. + :vartype allocation_state_transition_time: ~datetime.datetime + :ivar errors: Collection of errors encountered by various compute nodes during node setup. + :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar current_node_count: The number of compute nodes currently assigned to the compute. + :vartype current_node_count: int + :ivar target_node_count: The target number of compute nodes for the compute. If the + allocationState is resizing, this property denotes the target node count for the ongoing resize + operation. If the allocationState is steady, this property denotes the target node count for + the previous resize operation. + :vartype target_node_count: int + :ivar node_state_counts: Counts of various node states on the compute. 
+ :vartype node_state_counts: ~azure_machine_learning_workspaces.models.NodeStateCounts + """ + + _validation = { + 'allocation_state': {'readonly': True}, + 'allocation_state_transition_time': {'readonly': True}, + 'errors': {'readonly': True}, + 'current_node_count': {'readonly': True}, + 'target_node_count': {'readonly': True}, + 'node_state_counts': {'readonly': True}, + } + + _attribute_map = { + 'vm_size': {'key': 'vmSize', 'type': 'str'}, + 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, + 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, + 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, + 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, + 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, + 'allocation_state': {'key': 'allocationState', 'type': 'str'}, + 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, + 'errors': {'key': 'errors', 'type': '[MachineLearningServiceError]'}, + 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, + 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, + 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, + } + + def __init__( + self, + *, + vm_size: Optional[str] = None, + vm_priority: Optional[Union[str, "VmPriority"]] = None, + scale_settings: Optional["ScaleSettings"] = None, + user_account_credentials: Optional["UserAccountCredentials"] = None, + subnet: Optional["ResourceId"] = None, + remote_login_port_public_access: Optional[Union[str, "RemoteLoginPortPublicAccess"]] = "NotSpecified", + **kwargs + ): + super(AmlComputeProperties, self).__init__(**kwargs) + self.vm_size = vm_size + self.vm_priority = vm_priority + self.scale_settings = scale_settings + self.user_account_credentials = user_account_credentials + self.subnet = subnet + self.remote_login_port_public_access = remote_login_port_public_access + self.allocation_state = None + self.allocation_state_transition_time = None + self.errors = None + self.current_node_count = None + self.target_node_count = None + self.node_state_counts = None + + +class AmlUserFeature(msrest.serialization.Model): + """Features enabled for a workspace. + + :param id: Specifies the feature ID. + :type id: str + :param display_name: Specifies the feature name. + :type display_name: str + :param description: Describes the feature for user experience. + :type description: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + display_name: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + super(AmlUserFeature, self).__init__(**kwargs) + self.id = id + self.display_name = display_name + self.description = description + + +class ClusterUpdateParameters(msrest.serialization.Model): + """AmlCompute update parameters. + + :param scale_settings: Desired scale settings for the amlCompute. 
+ :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings + """ + + _attribute_map = { + 'scale_settings': {'key': 'properties.scaleSettings', 'type': 'ScaleSettings'}, + } + + def __init__( + self, + *, + scale_settings: Optional["ScaleSettings"] = None, + **kwargs + ): + super(ClusterUpdateParameters, self).__init__(**kwargs) + self.scale_settings = scale_settings + + +class ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties(msrest.serialization.Model): + """ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar principal_id: The principal id of user assigned identity. + :vartype principal_id: str + :ivar client_id: The client id of user assigned identity. + :vartype client_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'client_id': {'readonly': True}, + } + + _attribute_map = { + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'client_id': {'key': 'clientId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties, self).__init__(**kwargs) + self.principal_id = None + self.client_id = None + + +class ComputeInstance(Compute): + """An Azure Machine Learning compute instance. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param vm_size: Virtual Machine Size. + :type vm_size: str + :param application_sharing_policy: Policy for sharing applications on this compute instance + among users of parent workspace. If Personal, only the creator can access applications on this + compute instance. When Shared, any workspace user can access applications on this instance + depending on his/her assigned role. 
Possible values include: "Personal", "Shared". Default + value: "Shared". + :type application_sharing_policy: str or + ~azure_machine_learning_workspaces.models.ApplicationSharingPolicy + :param ssh_settings: Specifies policy and settings for SSH access. + :type ssh_settings: ~azure_machine_learning_workspaces.models.ComputeInstanceSshSettings + :ivar connectivity_endpoints: Describes all connectivity endpoints available for this + ComputeInstance. + :vartype connectivity_endpoints: + ~azure_machine_learning_workspaces.models.ComputeInstanceConnectivityEndpoints + :ivar applications: Describes available applications and their endpoints on this + ComputeInstance. + :vartype applications: + list[~azure_machine_learning_workspaces.models.ComputeInstanceApplication] + :ivar created_by: Describes information on user who created this ComputeInstance. + :vartype created_by: ~azure_machine_learning_workspaces.models.ComputeInstanceCreatedBy + :ivar errors: Collection of errors encountered on this ComputeInstance. + :vartype errors: list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar state: The current state of this ComputeInstance. Possible values include: "Creating", + "CreateFailed", "Deleting", "Running", "Restarting", "RestartFailed", "JobRunning", + "SettingUp", "Starting", "StartFailed", "StopFailed", "Stopped", "Stopping", "UserSettingUp", + "Unknown", "Unusable". + :vartype state: str or ~azure_machine_learning_workspaces.models.ComputeInstanceState + :param id: The ID of the resource. + :type id: str + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + 'connectivity_endpoints': {'readonly': True}, + 'applications': {'readonly': True}, + 'created_by': {'readonly': True}, + 'errors': {'readonly': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'vm_size': {'key': 'properties.vmSize', 'type': 'str'}, + 'application_sharing_policy': {'key': 'properties.applicationSharingPolicy', 'type': 'str'}, + 'ssh_settings': {'key': 'properties.sshSettings', 'type': 'ComputeInstanceSshSettings'}, + 'connectivity_endpoints': {'key': 'properties.connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, + 'applications': {'key': 'properties.applications', 'type': '[ComputeInstanceApplication]'}, + 'created_by': {'key': 'properties.createdBy', 'type': 'ComputeInstanceCreatedBy'}, + 'errors': {'key': 'properties.errors', 'type': '[MachineLearningServiceError]'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'id': {'key': 'properties.subnet.id', 'type': 'str'}, + } + + def __init__( + self, + *, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + vm_size: Optional[str] = None, + 
application_sharing_policy: Optional[Union[str, "ApplicationSharingPolicy"]] = "Shared",
+ ssh_settings: Optional["ComputeInstanceSshSettings"] = None,
+ id: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstance, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs)
+ self.compute_type = 'ComputeInstance' # type: str
+ self.vm_size = vm_size
+ self.application_sharing_policy = application_sharing_policy
+ self.ssh_settings = ssh_settings
+ self.connectivity_endpoints = None
+ self.applications = None
+ self.created_by = None
+ self.errors = None
+ self.state = None
+ self.id = id
+
+
+class ComputeInstanceApplication(msrest.serialization.Model):
+ """Defines an Aml Instance application and its connectivity endpoint URI.
+
+ :param display_name: Name of the ComputeInstance application.
+ :type display_name: str
+ :param endpoint_uri: Application's endpoint URI.
+ :type endpoint_uri: str
+ """
+
+ _attribute_map = {
+ 'display_name': {'key': 'displayName', 'type': 'str'},
+ 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ *,
+ display_name: Optional[str] = None,
+ endpoint_uri: Optional[str] = None,
+ **kwargs
+ ):
+ super(ComputeInstanceApplication, self).__init__(**kwargs)
+ self.display_name = display_name
+ self.endpoint_uri = endpoint_uri
+
+
+class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model):
+ """Defines all connectivity endpoints and properties for a ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar public_ip_address: Public IP Address of this ComputeInstance.
+ :vartype public_ip_address: str
+ :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in
+ which the compute instance is deployed).
+ :vartype private_ip_address: str
+ """
+
+ _validation = {
+ 'public_ip_address': {'readonly': True},
+ 'private_ip_address': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'},
+ 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs)
+ self.public_ip_address = None
+ self.private_ip_address = None
+
+
+class ComputeInstanceCreatedBy(msrest.serialization.Model):
+ """Describes information on user who created this ComputeInstance.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar user_name: Name of the user.
+ :vartype user_name: str
+ :ivar user_org_id: Uniquely identifies the user's Azure Active Directory organization.
+ :vartype user_org_id: str
+ :ivar user_id: Uniquely identifies the user within his/her organization.
+ :vartype user_id: str
+ """
+
+ _validation = {
+ 'user_name': {'readonly': True},
+ 'user_org_id': {'readonly': True},
+ 'user_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'user_name': {'key': 'userName', 'type': 'str'},
+ 'user_org_id': {'key': 'userOrgId', 'type': 'str'},
+ 'user_id': {'key': 'userId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ComputeInstanceCreatedBy, self).__init__(**kwargs)
+ self.user_name = None
+ self.user_org_id = None
+ self.user_id = None
+
+
+class ComputeInstanceSshSettings(msrest.serialization.Model):
+ """Specifies policy and settings for SSH access.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+ + :param ssh_public_access: State of the public SSH port. Possible values are: Disabled - + Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the + public ssh port is open and accessible according to the VNet/subnet policy if applicable. + Possible values include: "Enabled", "Disabled". Default value: "Disabled". + :type ssh_public_access: str or ~azure_machine_learning_workspaces.models.SshPublicAccess + :ivar admin_user_name: Describes the admin user name. + :vartype admin_user_name: str + :ivar ssh_port: Describes the port for connecting through SSH. + :vartype ssh_port: int + :param admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t + rsa -b 2048" to generate your SSH key pairs. + :type admin_public_key: str + """ + + _validation = { + 'admin_user_name': {'readonly': True}, + 'ssh_port': {'readonly': True}, + } + + _attribute_map = { + 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, + 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, + 'ssh_port': {'key': 'sshPort', 'type': 'int'}, + 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, + } + + def __init__( + self, + *, + ssh_public_access: Optional[Union[str, "SshPublicAccess"]] = "Disabled", + admin_public_key: Optional[str] = None, + **kwargs + ): + super(ComputeInstanceSshSettings, self).__init__(**kwargs) + self.ssh_public_access = ssh_public_access + self.admin_user_name = None + self.ssh_port = None + self.admin_public_key = admin_public_key + + +class Resource(msrest.serialization.Model): + """Azure Resource Manager resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar name: Specifies the name of the resource. + :vartype name: str + :param location: Specifies the location of the resource. + :type location: str + :ivar type: Specifies the type of the resource. + :vartype type: str + :param tags: A set of tags. Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type_identity_type: The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned", "None". + :type type_identity_type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType + :param user_assigned_identities: The list of user identities associated with resource. The user + identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. 
+ :type user_assigned_identities: dict[str, + ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, + 'type_identity_type': {'key': 'identity.type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'identity.userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["Sku"] = None, + type_identity_type: Optional[Union[str, "ResourceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, "ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.location = location + self.type = None + self.tags = tags + self.sku = sku + self.principal_id = None + self.tenant_id = None + self.type_identity_type = type_identity_type + self.user_assigned_identities = user_assigned_identities + + +class ComputeResource(Resource): + """Machine Learning compute object wrapped into ARM resource envelope. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar name: Specifies the name of the resource. + :vartype name: str + :param location: Specifies the location of the resource. + :type location: str + :ivar type: Specifies the type of the resource. + :vartype type: str + :param tags: A set of tags. Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type_identity_type: The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned", "None". + :type type_identity_type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType + :param user_assigned_identities: The list of user identities associated with resource. The user + identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + :type user_assigned_identities: dict[str, + ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + :param properties: Compute properties. 
+ :type properties: ~azure_machine_learning_workspaces.models.Compute
+ """
+
+ _validation = {
+ 'id': {'readonly': True},
+ 'name': {'readonly': True},
+ 'type': {'readonly': True},
+ 'principal_id': {'readonly': True},
+ 'tenant_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'id': {'key': 'id', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'location': {'key': 'location', 'type': 'str'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'sku': {'key': 'sku', 'type': 'Sku'},
+ 'principal_id': {'key': 'identity.principalId', 'type': 'str'},
+ 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'},
+ 'type_identity_type': {'key': 'identity.type', 'type': 'str'},
+ 'user_assigned_identities': {'key': 'identity.userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'},
+ 'properties': {'key': 'properties', 'type': 'Compute'},
+ }
+
+ def __init__(
+ self,
+ *,
+ location: Optional[str] = None,
+ tags: Optional[Dict[str, str]] = None,
+ sku: Optional["Sku"] = None,
+ type_identity_type: Optional[Union[str, "ResourceIdentityType"]] = None,
+ user_assigned_identities: Optional[Dict[str, "ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None,
+ properties: Optional["Compute"] = None,
+ **kwargs
+ ):
+ super(ComputeResource, self).__init__(location=location, tags=tags, sku=sku, type_identity_type=type_identity_type, user_assigned_identities=user_assigned_identities, **kwargs)
+ self.properties = properties
+
+
+class Databricks(Compute):
+ """A Databricks compute.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+ include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+ "Databricks", "DataLakeAnalytics".
+ :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+ :param compute_location: Location for the underlying compute.
+ :type compute_location: str
+ :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+ Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+ "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param description: The description of the Machine Learning compute.
+ :type description: str
+ :ivar created_on: The date and time when the compute was created.
+ :vartype created_on: ~datetime.datetime
+ :ivar modified_on: The date and time when the compute was last modified.
+ :vartype modified_on: ~datetime.datetime
+ :param resource_id: ARM resource id of the underlying compute.
+ :type resource_id: str
+ :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors:
+ list[~azure_machine_learning_workspaces.models.MachineLearningServiceError]
+ :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought
+ from outside if true, or machine learning service provisioned it if false.
+ :vartype is_attached_compute: bool
+ :param databricks_access_token: Databricks access token.
+ :type databricks_access_token: str + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'databricks_access_token': {'key': 'properties.databricksAccessToken', 'type': 'str'}, + } + + def __init__( + self, + *, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + databricks_access_token: Optional[str] = None, + **kwargs + ): + super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) + self.compute_type = 'Databricks' # type: str + self.databricks_access_token = databricks_access_token + + +class DatabricksComputeSecrets(ComputeSecrets): + """Secrets related to a Machine Learning compute based on Databricks. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param databricks_access_token: access token for databricks account. + :type databricks_access_token: str + """ + + _validation = { + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + } + + def __init__( + self, + *, + databricks_access_token: Optional[str] = None, + **kwargs + ): + super(DatabricksComputeSecrets, self).__init__(**kwargs) + self.compute_type = 'Databricks' # type: str + self.databricks_access_token = databricks_access_token + + +class DataFactory(Compute): + """A DataFactory compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". 
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + } + + def __init__( + self, + *, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + **kwargs + ): + super(DataFactory, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) + self.compute_type = 'DataFactory' # type: str + + +class DataLakeAnalytics(Compute): + """A DataLakeAnalytics compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. 
+ :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param data_lake_store_account_name: DataLake Store Account Name. + :type data_lake_store_account_name: str + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'data_lake_store_account_name': {'key': 'properties.dataLakeStoreAccountName', 'type': 'str'}, + } + + def __init__( + self, + *, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + data_lake_store_account_name: Optional[str] = None, + **kwargs + ): + super(DataLakeAnalytics, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) + self.compute_type = 'DataLakeAnalytics' # type: str + self.data_lake_store_account_name = data_lake_store_account_name + + +class ErrorDetail(msrest.serialization.Model): + """Error detail information. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. Error code. + :type code: str + :param message: Required. Error message. + :type message: str + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + *, + code: str, + message: str, + **kwargs + ): + super(ErrorDetail, self).__init__(**kwargs) + self.code = code + self.message = message + + +class ErrorResponse(msrest.serialization.Model): + """Error response information. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar code: Error code. + :vartype code: str + :ivar message: Error message. + :vartype message: str + :ivar details: An array of error detail objects. + :vartype details: list[~azure_machine_learning_workspaces.models.ErrorDetail] + """ + + _validation = { + 'code': {'readonly': True}, + 'message': {'readonly': True}, + 'details': {'readonly': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = None + self.message = None + self.details = None + + +class HdInsight(Compute): + """A HDInsight compute. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param compute_location: Location for the underlying compute. + :type compute_location: str + :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, + Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState + :param description: The description of the Machine Learning compute. + :type description: str + :ivar created_on: The date and time when the compute was created. + :vartype created_on: ~datetime.datetime + :ivar modified_on: The date and time when the compute was last modified. + :vartype modified_on: ~datetime.datetime + :param resource_id: ARM resource id of the underlying compute. + :type resource_id: str + :ivar provisioning_errors: Errors during provisioning. + :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param ssh_port: Port open for ssh connections on the master node of the cluster. + :type ssh_port: int + :param address: Public IP address of the master node of the cluster. + :type address: str + :param administrator_account: Admin credentials for master node of the cluster. 
+ :type administrator_account: + ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'ssh_port': {'key': 'properties.sshPort', 'type': 'int'}, + 'address': {'key': 'properties.address', 'type': 'str'}, + 'administrator_account': {'key': 'properties.administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + } + + def __init__( + self, + *, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + ssh_port: Optional[int] = None, + address: Optional[str] = None, + administrator_account: Optional["VirtualMachineSshCredentials"] = None, + **kwargs + ): + super(HdInsight, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) + self.compute_type = 'HDInsight' # type: str + self.ssh_port = ssh_port + self.address = address + self.administrator_account = administrator_account + + +class KeyVaultProperties(msrest.serialization.Model): + """KeyVaultProperties. + + All required parameters must be populated in order to send to Azure. + + :param key_vault_arm_id: Required. The ArmId of the keyVault where the customer owned + encryption key is present. + :type key_vault_arm_id: str + :param key_identifier: Required. Key vault uri to access the encryption key. + :type key_identifier: str + :param identity_client_id: For future use - The client id of the identity which will be used to + access key vault. + :type identity_client_id: str + """ + + _validation = { + 'key_vault_arm_id': {'required': True}, + 'key_identifier': {'required': True}, + } + + _attribute_map = { + 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, + 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, + 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, + } + + def __init__( + self, + *, + key_vault_arm_id: str, + key_identifier: str, + identity_client_id: Optional[str] = None, + **kwargs + ): + super(KeyVaultProperties, self).__init__(**kwargs) + self.key_vault_arm_id = key_vault_arm_id + self.key_identifier = key_identifier + self.identity_client_id = identity_client_id + + +class ListAmlUserFeatureResult(msrest.serialization.Model): + """The List Aml user feature operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of AML user facing features. + :vartype value: list[~azure_machine_learning_workspaces.models.AmlUserFeature] + :ivar next_link: The URI to fetch the next page of AML user features information. Call + ListNext() with this to fetch the next page of AML user features information. 
+ :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[AmlUserFeature]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListAmlUserFeatureResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListUsagesResult(msrest.serialization.Model): + """The List Usages operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of AML resource usages. + :vartype value: list[~azure_machine_learning_workspaces.models.Usage] + :ivar next_link: The URI to fetch the next page of AML resource usage information. Call + ListNext() with this to fetch the next page of AML resource usage information. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Usage]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListUsagesResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class ListWorkspaceKeysResult(msrest.serialization.Model): + """ListWorkspaceKeysResult. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar user_storage_key: + :vartype user_storage_key: str + :ivar user_storage_resource_id: + :vartype user_storage_resource_id: str + :ivar app_insights_instrumentation_key: + :vartype app_insights_instrumentation_key: str + :ivar container_registry_credentials: + :vartype container_registry_credentials: + ~azure_machine_learning_workspaces.models.RegistryListCredentialsResult + :param notebook_access_keys: + :type notebook_access_keys: + ~azure_machine_learning_workspaces.models.NotebookListCredentialsResult + """ + + _validation = { + 'user_storage_key': {'readonly': True}, + 'user_storage_resource_id': {'readonly': True}, + 'app_insights_instrumentation_key': {'readonly': True}, + 'container_registry_credentials': {'readonly': True}, + } + + _attribute_map = { + 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + 'user_storage_resource_id': {'key': 'userStorageResourceId', 'type': 'str'}, + 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, + 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, + 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'NotebookListCredentialsResult'}, + } + + def __init__( + self, + *, + notebook_access_keys: Optional["NotebookListCredentialsResult"] = None, + **kwargs + ): + super(ListWorkspaceKeysResult, self).__init__(**kwargs) + self.user_storage_key = None + self.user_storage_resource_id = None + self.app_insights_instrumentation_key = None + self.container_registry_credentials = None + self.notebook_access_keys = notebook_access_keys + + +class ListWorkspaceQuotas(msrest.serialization.Model): + """The List WorkspaceQuotasByVMFamily operation response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of Workspace Quotas by VM Family. + :vartype value: list[~azure_machine_learning_workspaces.models.ResourceQuota] + :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. 
+ Call ListNext() with this to fetch the next page of Workspace Quota information. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ResourceQuota]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ListWorkspaceQuotas, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class MachineLearningServiceError(msrest.serialization.Model): + """Wrapper for error response to follow ARM guidelines. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar error: The error response. + :vartype error: ~azure_machine_learning_workspaces.models.ErrorResponse + """ + + _validation = { + 'error': {'readonly': True}, + } + + _attribute_map = { + 'error': {'key': 'error', 'type': 'ErrorResponse'}, + } + + def __init__( + self, + **kwargs + ): + super(MachineLearningServiceError, self).__init__(**kwargs) + self.error = None + + +class NodeStateCounts(msrest.serialization.Model): + """Counts of various compute node states on the amlCompute. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar idle_node_count: Number of compute nodes in idle state. + :vartype idle_node_count: int + :ivar running_node_count: Number of compute nodes which are running jobs. + :vartype running_node_count: int + :ivar preparing_node_count: Number of compute nodes which are being prepared. + :vartype preparing_node_count: int + :ivar unusable_node_count: Number of compute nodes which are in unusable state. + :vartype unusable_node_count: int + :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. + :vartype leaving_node_count: int + :ivar preempted_node_count: Number of compute nodes which are in preempted state. + :vartype preempted_node_count: int + """ + + _validation = { + 'idle_node_count': {'readonly': True}, + 'running_node_count': {'readonly': True}, + 'preparing_node_count': {'readonly': True}, + 'unusable_node_count': {'readonly': True}, + 'leaving_node_count': {'readonly': True}, + 'preempted_node_count': {'readonly': True}, + } + + _attribute_map = { + 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, + 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, + 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, + 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, + 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, + 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(NodeStateCounts, self).__init__(**kwargs) + self.idle_node_count = None + self.running_node_count = None + self.preparing_node_count = None + self.unusable_node_count = None + self.leaving_node_count = None + self.preempted_node_count = None + + +class NotebookListCredentialsResult(msrest.serialization.Model): + """NotebookListCredentialsResult. 
+ + :param primary_access_key: + :type primary_access_key: str + :param secondary_access_key: + :type secondary_access_key: str + """ + + _attribute_map = { + 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, + 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, + } + + def __init__( + self, + *, + primary_access_key: Optional[str] = None, + secondary_access_key: Optional[str] = None, + **kwargs + ): + super(NotebookListCredentialsResult, self).__init__(**kwargs) + self.primary_access_key = primary_access_key + self.secondary_access_key = secondary_access_key + + +class NotebookPreparationError(msrest.serialization.Model): + """NotebookPreparationError. + + :param error_message: + :type error_message: str + :param status_code: + :type status_code: int + """ + + _attribute_map = { + 'error_message': {'key': 'errorMessage', 'type': 'str'}, + 'status_code': {'key': 'statusCode', 'type': 'int'}, + } + + def __init__( + self, + *, + error_message: Optional[str] = None, + status_code: Optional[int] = None, + **kwargs + ): + super(NotebookPreparationError, self).__init__(**kwargs) + self.error_message = error_message + self.status_code = status_code + + +class NotebookResourceInfo(msrest.serialization.Model): + """NotebookResourceInfo. + + :param fqdn: + :type fqdn: str + :param resource_id: the data plane resourceId that used to initialize notebook component. + :type resource_id: str + :param notebook_preparation_error: The error that occurs when preparing notebook. + :type notebook_preparation_error: + ~azure_machine_learning_workspaces.models.NotebookPreparationError + """ + + _attribute_map = { + 'fqdn': {'key': 'fqdn', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, + } + + def __init__( + self, + *, + fqdn: Optional[str] = None, + resource_id: Optional[str] = None, + notebook_preparation_error: Optional["NotebookPreparationError"] = None, + **kwargs + ): + super(NotebookResourceInfo, self).__init__(**kwargs) + self.fqdn = fqdn + self.resource_id = resource_id + self.notebook_preparation_error = notebook_preparation_error + + +class Operation(msrest.serialization.Model): + """Azure Machine Learning workspace REST API operation. + + :param name: Operation name: {provider}/{resource}/{operation}. + :type name: str + :param display: Display name of operation. + :type display: ~azure_machine_learning_workspaces.models.OperationDisplay + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display': {'key': 'display', 'type': 'OperationDisplay'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + display: Optional["OperationDisplay"] = None, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = name + self.display = display + + +class OperationDisplay(msrest.serialization.Model): + """Display name of operation. + + :param provider: The resource provider name: Microsoft.MachineLearningExperimentation. + :type provider: str + :param resource: The resource on which the operation is performed. + :type resource: str + :param operation: The operation that users can perform. + :type operation: str + :param description: The description for the operation. 
+ :type description: str + """ + + _attribute_map = { + 'provider': {'key': 'provider', 'type': 'str'}, + 'resource': {'key': 'resource', 'type': 'str'}, + 'operation': {'key': 'operation', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + super(OperationDisplay, self).__init__(**kwargs) + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class OperationListResult(msrest.serialization.Model): + """An array of operations supported by the resource provider. + + :param value: List of AML workspace operations supported by the AML workspace resource + provider. + :type value: list[~azure_machine_learning_workspaces.models.Operation] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + } + + def __init__( + self, + *, + value: Optional[List["Operation"]] = None, + **kwargs + ): + super(OperationListResult, self).__init__(**kwargs) + self.value = value + + +class PaginatedComputeResourcesList(msrest.serialization.Model): + """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope. + + :param value: An array of Machine Learning compute objects wrapped in ARM resource envelope. + :type value: list[~azure_machine_learning_workspaces.models.ComputeResource] + :param next_link: A continuation link (absolute URI) to the next page of results in the list. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[ComputeResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["ComputeResource"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(PaginatedComputeResourcesList, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class PaginatedWorkspaceConnectionsList(msrest.serialization.Model): + """Paginated list of Workspace connection objects. + + :param value: An array of Workspace connection objects. + :type value: list[~azure_machine_learning_workspaces.models.WorkspaceConnection] + :param next_link: A continuation link (absolute URI) to the next page of results in the list. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[WorkspaceConnection]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["WorkspaceConnection"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(PaginatedWorkspaceConnectionsList, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class Password(msrest.serialization.Model): + """Password. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: + :vartype name: str + :ivar value: + :vartype value: str + """ + + _validation = { + 'name': {'readonly': True}, + 'value': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Password, self).__init__(**kwargs) + self.name = None + self.value = None + + +class PrivateEndpoint(msrest.serialization.Model): + """The Private Endpoint resource. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar id: The ARM identifier for Private Endpoint. + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PrivateEndpoint, self).__init__(**kwargs) + self.id = None + + +class PrivateEndpointConnection(msrest.serialization.Model): + """The Private Endpoint Connection resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: ResourceId of the private endpoint connection. + :vartype id: str + :ivar name: Friendly name of the private endpoint connection. + :vartype name: str + :ivar type: Resource type of private endpoint connection. + :vartype type: str + :param private_endpoint: The resource of private end point. + :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint + :param private_link_service_connection_state: A collection of information about the state of + the connection between service consumer and provider. + :type private_link_service_connection_state: + ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: The provisioning state of the private endpoint connection resource. + Possible values include: "Succeeded", "Creating", "Deleting", "Failed". + :vartype provisioning_state: str or + ~azure_machine_learning_workspaces.models.PrivateEndpointConnectionProvisioningState + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpoint'}, + 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + } + + def __init__( + self, + *, + private_endpoint: Optional["PrivateEndpoint"] = None, + private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None, + **kwargs + ): + super(PrivateEndpointConnection, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + self.provisioning_state = None + + +class PrivateLinkResource(Resource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar name: Specifies the name of the resource. + :vartype name: str + :param location: Specifies the location of the resource. + :type location: str + :ivar type: Specifies the type of the resource. + :vartype type: str + :param tags: A set of tags. Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type_identity_type: The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned", "None". 
+ :type type_identity_type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType + :param user_assigned_identities: The list of user identities associated with resource. The user + identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + :type user_assigned_identities: dict[str, + ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :param required_zone_names: The private link resource Private link DNS zone name. + :type required_zone_names: list[str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, + 'type_identity_type': {'key': 'identity.type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'identity.userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["Sku"] = None, + type_identity_type: Optional[Union[str, "ResourceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, "ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None, + required_zone_names: Optional[List[str]] = None, + **kwargs + ): + super(PrivateLinkResource, self).__init__(location=location, tags=tags, sku=sku, type_identity_type=type_identity_type, user_assigned_identities=user_assigned_identities, **kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = required_zone_names + + +class PrivateLinkResourceListResult(msrest.serialization.Model): + """A list of private link resources. + + :param value: Array of private link resources. + :type value: list[~azure_machine_learning_workspaces.models.PrivateLinkResource] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__( + self, + *, + value: Optional[List["PrivateLinkResource"]] = None, + **kwargs + ): + super(PrivateLinkResourceListResult, self).__init__(**kwargs) + self.value = value + + +class PrivateLinkServiceConnectionState(msrest.serialization.Model): + """A collection of information about the state of the connection between service consumer and provider. 
+ + :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", + "Timeout". + :type status: str or + ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus + :param description: The reason for approval/rejection of the connection. + :type description: str + :param actions_required: A message indicating if changes on the service provider require any + updates on the consumer. + :type actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__( + self, + *, + status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None, + description: Optional[str] = None, + actions_required: Optional[str] = None, + **kwargs + ): + super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + self.status = status + self.description = description + self.actions_required = actions_required + + +class QuotaBaseProperties(msrest.serialization.Model): + """The properties for Quota update or retrieval. + + :param id: Specifies the resource ID. + :type id: str + :param type: Specifies the resource type. + :type type: str + :param limit: The maximum permitted quota of the resource. + :type limit: long + :param unit: An enum describing the unit of quota measurement. Possible values include: + "Count". + :type unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + type: Optional[str] = None, + limit: Optional[int] = None, + unit: Optional[Union[str, "QuotaUnit"]] = None, + **kwargs + ): + super(QuotaBaseProperties, self).__init__(**kwargs) + self.id = id + self.type = type + self.limit = limit + self.unit = unit + + +class QuotaUpdateParameters(msrest.serialization.Model): + """Quota update parameters. + + :param value: The list for update quota. + :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, + } + + def __init__( + self, + *, + value: Optional[List["QuotaBaseProperties"]] = None, + **kwargs + ): + super(QuotaUpdateParameters, self).__init__(**kwargs) + self.value = value + + +class RegistryListCredentialsResult(msrest.serialization.Model): + """RegistryListCredentialsResult. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar location: + :vartype location: str + :ivar username: + :vartype username: str + :param passwords: + :type passwords: list[~azure_machine_learning_workspaces.models.Password] + """ + + _validation = { + 'location': {'readonly': True}, + 'username': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'username': {'key': 'username', 'type': 'str'}, + 'passwords': {'key': 'passwords', 'type': '[Password]'}, + } + + def __init__( + self, + *, + passwords: Optional[List["Password"]] = None, + **kwargs + ): + super(RegistryListCredentialsResult, self).__init__(**kwargs) + self.location = None + self.username = None + self.passwords = passwords + + +class ResourceId(msrest.serialization.Model): + """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. The ID of the resource. + :type id: str + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__( + self, + *, + id: str, + **kwargs + ): + super(ResourceId, self).__init__(**kwargs) + self.id = id + + +class ResourceName(msrest.serialization.Model): + """The Resource Name. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The name of the resource. + :vartype value: str + :ivar localized_value: The localized name of the resource. + :vartype localized_value: str + """ + + _validation = { + 'value': {'readonly': True}, + 'localized_value': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': 'str'}, + 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceName, self).__init__(**kwargs) + self.value = None + self.localized_value = None + + +class ResourceQuota(msrest.serialization.Model): + """The quota assigned to a resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar name: Name of the resource. + :vartype name: ~azure_machine_learning_workspaces.models.ResourceName + :ivar limit: The maximum permitted quota of the resource. + :vartype limit: long + :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". + :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'name': {'readonly': True}, + 'limit': {'readonly': True}, + 'unit': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'ResourceName'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceQuota, self).__init__(**kwargs) + self.id = None + self.type = None + self.name = None + self.limit = None + self.unit = None + + +class ResourceSkuLocationInfo(msrest.serialization.Model): + """ResourceSkuLocationInfo. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar location: Location of the SKU. + :vartype location: str + :ivar zones: List of availability zones where the SKU is supported. 
+ :vartype zones: list[str] + :ivar zone_details: Details of capabilities available to a SKU in specific zones. + :vartype zone_details: list[~azure_machine_learning_workspaces.models.ResourceSkuZoneDetails] + """ + + _validation = { + 'location': {'readonly': True}, + 'zones': {'readonly': True}, + 'zone_details': {'readonly': True}, + } + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'zones': {'key': 'zones', 'type': '[str]'}, + 'zone_details': {'key': 'zoneDetails', 'type': '[ResourceSkuZoneDetails]'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceSkuLocationInfo, self).__init__(**kwargs) + self.location = None + self.zones = None + self.zone_details = None + + +class ResourceSkuZoneDetails(msrest.serialization.Model): + """Describes The zonal capabilities of a SKU. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The set of zones that the SKU is available in with the specified capabilities. + :vartype name: list[str] + :ivar capabilities: A list of capabilities that are available for the SKU in the specified list + of zones. + :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability] + """ + + _validation = { + 'name': {'readonly': True}, + 'capabilities': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': '[str]'}, + 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'}, + } + + def __init__( + self, + **kwargs + ): + super(ResourceSkuZoneDetails, self).__init__(**kwargs) + self.name = None + self.capabilities = None + + +class Restriction(msrest.serialization.Model): + """The restriction because of which SKU cannot be used. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar type: The type of restrictions. As of now only possible value for this is location. + :vartype type: str + :ivar values: The value of restrictions. If the restriction type is set to location. This would + be different locations where the SKU is restricted. + :vartype values: list[str] + :param reason_code: The reason for the restriction. Possible values include: "NotSpecified", + "NotAvailableForRegion", "NotAvailableForSubscription". + :type reason_code: str or ~azure_machine_learning_workspaces.models.ReasonCode + """ + + _validation = { + 'type': {'readonly': True}, + 'values': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'values': {'key': 'values', 'type': '[str]'}, + 'reason_code': {'key': 'reasonCode', 'type': 'str'}, + } + + def __init__( + self, + *, + reason_code: Optional[Union[str, "ReasonCode"]] = None, + **kwargs + ): + super(Restriction, self).__init__(**kwargs) + self.type = None + self.values = None + self.reason_code = reason_code + + +class ScaleSettings(msrest.serialization.Model): + """scale settings for AML Compute. + + All required parameters must be populated in order to send to Azure. + + :param max_node_count: Required. Max number of nodes to use. + :type max_node_count: int + :param min_node_count: Min number of nodes to use. + :type min_node_count: int + :param node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. 
+ :type node_idle_time_before_scale_down: ~datetime.timedelta + """ + + _validation = { + 'max_node_count': {'required': True}, + } + + _attribute_map = { + 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, + 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, + } + + def __init__( + self, + *, + max_node_count: int, + min_node_count: Optional[int] = 0, + node_idle_time_before_scale_down: Optional[datetime.timedelta] = None, + **kwargs + ): + super(ScaleSettings, self).__init__(**kwargs) + self.max_node_count = max_node_count + self.min_node_count = min_node_count + self.node_idle_time_before_scale_down = node_idle_time_before_scale_down + + +class ServicePrincipalCredentials(msrest.serialization.Model): + """Service principal credentials. + + All required parameters must be populated in order to send to Azure. + + :param client_id: Required. Client Id. + :type client_id: str + :param client_secret: Required. Client secret. + :type client_secret: str + """ + + _validation = { + 'client_id': {'required': True}, + 'client_secret': {'required': True}, + } + + _attribute_map = { + 'client_id': {'key': 'clientId', 'type': 'str'}, + 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + } + + def __init__( + self, + *, + client_id: str, + client_secret: str, + **kwargs + ): + super(ServicePrincipalCredentials, self).__init__(**kwargs) + self.client_id = client_id + self.client_secret = client_secret + + +class SharedPrivateLinkResource(msrest.serialization.Model): + """SharedPrivateLinkResource. + + :param name: Unique name of the private link. + :type name: str + :param private_link_resource_id: The resource id that private link links to. + :type private_link_resource_id: str + :param group_id: The private link resource group id. + :type group_id: str + :param request_message: Request message. + :type request_message: str + :param status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Possible values include: "Pending", "Approved", "Rejected", "Disconnected", + "Timeout". + :type status: str or + ~azure_machine_learning_workspaces.models.PrivateEndpointServiceConnectionStatus + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, + 'group_id': {'key': 'properties.groupId', 'type': 'str'}, + 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, + 'status': {'key': 'properties.status', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + private_link_resource_id: Optional[str] = None, + group_id: Optional[str] = None, + request_message: Optional[str] = None, + status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None, + **kwargs + ): + super(SharedPrivateLinkResource, self).__init__(**kwargs) + self.name = name + self.private_link_resource_id = private_link_resource_id + self.group_id = group_id + self.request_message = request_message + self.status = status + + +class Sku(msrest.serialization.Model): + """Sku of the resource. + + :param name: Name of the sku. + :type name: str + :param tier: Tier of the sku like Basic or Enterprise. 
+ :type tier: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'tier': {'key': 'tier', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + tier: Optional[str] = None, + **kwargs + ): + super(Sku, self).__init__(**kwargs) + self.name = name + self.tier = tier + + +class SkuCapability(msrest.serialization.Model): + """Features/user capabilities associated with the sku. + + :param name: Capability/Feature ID. + :type name: str + :param value: Details about the feature/capability. + :type value: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + value: Optional[str] = None, + **kwargs + ): + super(SkuCapability, self).__init__(**kwargs) + self.name = name + self.value = value + + +class SkuListResult(msrest.serialization.Model): + """List of skus with features. + + :param value: + :type value: list[~azure_machine_learning_workspaces.models.WorkspaceSku] + :param next_link: The URI to fetch the next page of Workspace Skus. Call ListNext() with this + URI to fetch the next page of Workspace Skus. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[WorkspaceSku]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["WorkspaceSku"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(SkuListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class SkuSettings(msrest.serialization.Model): + """Describes Workspace Sku details and features. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar locations: The set of locations that the SKU is available. This will be supported and + registered Azure Geo Regions (e.g. West US, East US, Southeast Asia, etc.). + :vartype locations: list[str] + :ivar location_info: A list of locations and availability zones in those locations where the + SKU is available. + :vartype location_info: list[~azure_machine_learning_workspaces.models.ResourceSkuLocationInfo] + :ivar tier: Sku Tier like Basic or Enterprise. + :vartype tier: str + :ivar resource_type: + :vartype resource_type: str + :ivar name: + :vartype name: str + :ivar capabilities: List of features/user capabilities associated with the sku. + :vartype capabilities: list[~azure_machine_learning_workspaces.models.SkuCapability] + :param restrictions: The restrictions because of which SKU cannot be used. This is empty if + there are no restrictions. 
+ :type restrictions: list[~azure_machine_learning_workspaces.models.Restriction] + """ + + _validation = { + 'locations': {'readonly': True}, + 'location_info': {'readonly': True}, + 'tier': {'readonly': True}, + 'resource_type': {'readonly': True}, + 'name': {'readonly': True}, + 'capabilities': {'readonly': True}, + } + + _attribute_map = { + 'locations': {'key': 'locations', 'type': '[str]'}, + 'location_info': {'key': 'locationInfo', 'type': '[ResourceSkuLocationInfo]'}, + 'tier': {'key': 'tier', 'type': 'str'}, + 'resource_type': {'key': 'resourceType', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '[SkuCapability]'}, + 'restrictions': {'key': 'restrictions', 'type': '[Restriction]'}, + } + + def __init__( + self, + *, + restrictions: Optional[List["Restriction"]] = None, + **kwargs + ): + super(SkuSettings, self).__init__(**kwargs) + self.locations = None + self.location_info = None + self.tier = None + self.resource_type = None + self.name = None + self.capabilities = None + self.restrictions = restrictions + + +class SSLConfiguration(msrest.serialization.Model): + """The ssl configuration for scoring. + + :param status: Enable or disable ssl for scoring. Possible values include: "Disabled", + "Enabled". + :type status: str or ~azure_machine_learning_workspaces.models.SSLConfigurationStatus + :param cert: Cert data. + :type cert: str + :param key: Key data. + :type key: str + :param cname: CNAME of the cert. + :type cname: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'cert': {'key': 'cert', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'str'}, + 'cname': {'key': 'cname', 'type': 'str'}, + } + + def __init__( + self, + *, + status: Optional[Union[str, "SSLConfigurationStatus"]] = None, + cert: Optional[str] = None, + key: Optional[str] = None, + cname: Optional[str] = None, + **kwargs + ): + super(SSLConfiguration, self).__init__(**kwargs) + self.status = status + self.cert = cert + self.key = key + self.cname = cname + + +class SystemService(msrest.serialization.Model): + """A system service running on a compute. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar system_service_type: The type of this system service. + :vartype system_service_type: str + :ivar public_ip_address: Public IP address. + :vartype public_ip_address: str + :ivar version: The version for this type. + :vartype version: str + """ + + _validation = { + 'system_service_type': {'readonly': True}, + 'public_ip_address': {'readonly': True}, + 'version': {'readonly': True}, + } + + _attribute_map = { + 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, + 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemService, self).__init__(**kwargs) + self.system_service_type = None + self.public_ip_address = None + self.version = None + + +class UpdateWorkspaceQuotas(msrest.serialization.Model): + """The properties for update Quota response. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. + :vartype type: str + :param limit: The maximum permitted quota of the resource. + :type limit: long + :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". 
+ :vartype unit: str or ~azure_machine_learning_workspaces.models.QuotaUnit + :param status: Status of update workspace quota. Possible values include: "Undefined", + "Success", "Failure", "InvalidQuotaBelowClusterMinimum", + "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku", + "OperationNotEnabledForRegion". + :type status: str or ~azure_machine_learning_workspaces.models.Status + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'unit': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + *, + limit: Optional[int] = None, + status: Optional[Union[str, "Status"]] = None, + **kwargs + ): + super(UpdateWorkspaceQuotas, self).__init__(**kwargs) + self.id = None + self.type = None + self.limit = limit + self.unit = None + self.status = status + + +class UpdateWorkspaceQuotasResult(msrest.serialization.Model): + """The result of update workspace quota. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: The list of workspace quota update result. + :vartype value: list[~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotas] + :ivar next_link: The URI to fetch the next page of workspace quota update result. Call + ListNext() with this to fetch the next page of Workspace Quota update result. + :vartype next_link: str + """ + + _validation = { + 'value': {'readonly': True}, + 'next_link': {'readonly': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) + self.value = None + self.next_link = None + + +class Usage(msrest.serialization.Model): + """Describes AML Resource Usage. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar type: Specifies the resource type. + :vartype type: str + :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count". + :vartype unit: str or ~azure_machine_learning_workspaces.models.UsageUnit + :ivar current_value: The current usage of the resource. + :vartype current_value: long + :ivar limit: The maximum permitted usage of the resource. + :vartype limit: long + :ivar name: The name of the type of usage. + :vartype name: ~azure_machine_learning_workspaces.models.UsageName + """ + + _validation = { + 'id': {'readonly': True}, + 'type': {'readonly': True}, + 'unit': {'readonly': True}, + 'current_value': {'readonly': True}, + 'limit': {'readonly': True}, + 'name': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'current_value': {'key': 'currentValue', 'type': 'long'}, + 'limit': {'key': 'limit', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'UsageName'}, + } + + def __init__( + self, + **kwargs + ): + super(Usage, self).__init__(**kwargs) + self.id = None + self.type = None + self.unit = None + self.current_value = None + self.limit = None + self.name = None + + +class UsageName(msrest.serialization.Model): + """The Usage Names. 
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar value: The name of the resource.
+    :vartype value: str
+    :ivar localized_value: The localized name of the resource.
+    :vartype localized_value: str
+    """
+
+    _validation = {
+        'value': {'readonly': True},
+        'localized_value': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': 'str'},
+        'localized_value': {'key': 'localizedValue', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(UsageName, self).__init__(**kwargs)
+        self.value = None
+        self.localized_value = None
+
+
+class UserAccountCredentials(msrest.serialization.Model):
+    """Settings for the user account that gets created on each of the nodes of a compute.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param admin_user_name: Required. Name of the administrator user account which can be used to
+     SSH to nodes.
+    :type admin_user_name: str
+    :param admin_user_ssh_public_key: SSH public key of the administrator user account.
+    :type admin_user_ssh_public_key: str
+    :param admin_user_password: Password of the administrator user account.
+    :type admin_user_password: str
+    """
+
+    _validation = {
+        'admin_user_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'admin_user_name': {'key': 'adminUserName', 'type': 'str'},
+        'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'},
+        'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        admin_user_name: str,
+        admin_user_ssh_public_key: Optional[str] = None,
+        admin_user_password: Optional[str] = None,
+        **kwargs
+    ):
+        super(UserAccountCredentials, self).__init__(**kwargs)
+        self.admin_user_name = admin_user_name
+        self.admin_user_ssh_public_key = admin_user_ssh_public_key
+        self.admin_user_password = admin_user_password
+
+
+class VirtualMachine(Compute):
+    """A Machine Learning compute based on Azure Virtual Machines.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param compute_type: Required. The type of compute.Constant filled by server. Possible values
+     include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight",
+     "Databricks", "DataLakeAnalytics".
+    :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType
+    :param compute_location: Location for the underlying compute.
+    :type compute_location: str
+    :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown,
+     Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating",
+     "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+    :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+    :param description: The description of the Machine Learning compute.
+    :type description: str
+    :ivar created_on: The date and time when the compute was created.
+    :vartype created_on: ~datetime.datetime
+    :ivar modified_on: The date and time when the compute was last modified.
+    :vartype modified_on: ~datetime.datetime
+    :param resource_id: ARM resource id of the underlying compute.
+    :type resource_id: str
+    :ivar provisioning_errors: Errors during provisioning.
+ :vartype provisioning_errors: + list[~azure_machine_learning_workspaces.models.MachineLearningServiceError] + :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought + from outside if true, or machine learning service provisioned it if false. + :vartype is_attached_compute: bool + :param virtual_machine_size: Virtual Machine size. + :type virtual_machine_size: str + :param ssh_port: Port open for ssh connections. + :type ssh_port: int + :param address: Public IP address of the virtual machine. + :type address: str + :param administrator_account: Admin credentials for virtual machine. + :type administrator_account: + ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials + """ + + _validation = { + 'compute_type': {'required': True}, + 'provisioning_state': {'readonly': True}, + 'created_on': {'readonly': True}, + 'modified_on': {'readonly': True}, + 'provisioning_errors': {'readonly': True}, + 'is_attached_compute': {'readonly': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'compute_location': {'key': 'computeLocation', 'type': 'str'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, + 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[MachineLearningServiceError]'}, + 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, + 'virtual_machine_size': {'key': 'properties.virtualMachineSize', 'type': 'str'}, + 'ssh_port': {'key': 'properties.sshPort', 'type': 'int'}, + 'address': {'key': 'properties.address', 'type': 'str'}, + 'administrator_account': {'key': 'properties.administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + } + + def __init__( + self, + *, + compute_location: Optional[str] = None, + description: Optional[str] = None, + resource_id: Optional[str] = None, + virtual_machine_size: Optional[str] = None, + ssh_port: Optional[int] = None, + address: Optional[str] = None, + administrator_account: Optional["VirtualMachineSshCredentials"] = None, + **kwargs + ): + super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, **kwargs) + self.compute_type = 'VirtualMachine' # type: str + self.virtual_machine_size = virtual_machine_size + self.ssh_port = ssh_port + self.address = address + self.administrator_account = administrator_account + + +class VirtualMachineSecrets(ComputeSecrets): + """Secrets related to a Machine Learning compute based on AKS. + + All required parameters must be populated in order to send to Azure. + + :param compute_type: Required. The type of compute.Constant filled by server. Possible values + include: "AKS", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", + "Databricks", "DataLakeAnalytics". + :type compute_type: str or ~azure_machine_learning_workspaces.models.ComputeType + :param administrator_account: Admin credentials for virtual machine. 
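+
+ For illustration only (not generated code; values are placeholders): the same
+ VirtualMachineSshCredentials model is used when attaching a VirtualMachine compute, e.g.::
+
+     vm = VirtualMachine(
+         resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Compute/virtualMachines/<vm>",
+         ssh_port=22,
+         administrator_account=VirtualMachineSshCredentials(
+             username="azureuser",
+             public_key_data="ssh-rsa AAAA... (placeholder)",
+         ),
+     )
+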
+ :type administrator_account: + ~azure_machine_learning_workspaces.models.VirtualMachineSshCredentials + """ + + _validation = { + 'compute_type': {'required': True}, + } + + _attribute_map = { + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + } + + def __init__( + self, + *, + administrator_account: Optional["VirtualMachineSshCredentials"] = None, + **kwargs + ): + super(VirtualMachineSecrets, self).__init__(**kwargs) + self.compute_type = 'VirtualMachine' # type: str + self.administrator_account = administrator_account + + +class VirtualMachineSize(msrest.serialization.Model): + """Describes the properties of a VM size. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The name of the virtual machine size. + :vartype name: str + :ivar family: The family name of the virtual machine size. + :vartype family: str + :ivar v_cp_us: The number of vCPUs supported by the virtual machine size. + :vartype v_cp_us: int + :ivar gpus: The number of gPUs supported by the virtual machine size. + :vartype gpus: int + :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size. + :vartype os_vhd_size_mb: int + :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine + size. + :vartype max_resource_volume_mb: int + :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size. + :vartype memory_gb: float + :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs. + :vartype low_priority_capable: bool + :ivar premium_io: Specifies if the virtual machine size supports premium IO. + :vartype premium_io: bool + """ + + _validation = { + 'name': {'readonly': True}, + 'family': {'readonly': True}, + 'v_cp_us': {'readonly': True}, + 'gpus': {'readonly': True}, + 'os_vhd_size_mb': {'readonly': True}, + 'max_resource_volume_mb': {'readonly': True}, + 'memory_gb': {'readonly': True}, + 'low_priority_capable': {'readonly': True}, + 'premium_io': {'readonly': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'family': {'key': 'family', 'type': 'str'}, + 'v_cp_us': {'key': 'vCPUs', 'type': 'int'}, + 'gpus': {'key': 'gpus', 'type': 'int'}, + 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'}, + 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'}, + 'memory_gb': {'key': 'memoryGB', 'type': 'float'}, + 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'}, + 'premium_io': {'key': 'premiumIO', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(VirtualMachineSize, self).__init__(**kwargs) + self.name = None + self.family = None + self.v_cp_us = None + self.gpus = None + self.os_vhd_size_mb = None + self.max_resource_volume_mb = None + self.memory_gb = None + self.low_priority_capable = None + self.premium_io = None + + +class VirtualMachineSizeListResult(msrest.serialization.Model): + """The List Virtual Machine size operation response. + + :param aml_compute: The list of virtual machine sizes supported by AmlCompute. 
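+
+ As an illustration only (the operation-group attribute on the client and the ``list``
+ signature are assumptions, not shown in this file), this result is typically obtained per
+ region::
+
+     result = client.virtual_machine_size.list(location="eastus")
+     for size in result.aml_compute or []:
+         print(size.name, size.v_cp_us, size.memory_gb)
+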
+ :type aml_compute: list[~azure_machine_learning_workspaces.models.VirtualMachineSize] + """ + + _attribute_map = { + 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'}, + } + + def __init__( + self, + *, + aml_compute: Optional[List["VirtualMachineSize"]] = None, + **kwargs + ): + super(VirtualMachineSizeListResult, self).__init__(**kwargs) + self.aml_compute = aml_compute + + +class VirtualMachineSshCredentials(msrest.serialization.Model): + """Admin credentials for virtual machine. + + :param username: Username of admin account. + :type username: str + :param password: Password of admin account. + :type password: str + :param public_key_data: Public key data. + :type public_key_data: str + :param private_key_data: Private key data. + :type private_key_data: str + """ + + _attribute_map = { + 'username': {'key': 'username', 'type': 'str'}, + 'password': {'key': 'password', 'type': 'str'}, + 'public_key_data': {'key': 'publicKeyData', 'type': 'str'}, + 'private_key_data': {'key': 'privateKeyData', 'type': 'str'}, + } + + def __init__( + self, + *, + username: Optional[str] = None, + password: Optional[str] = None, + public_key_data: Optional[str] = None, + private_key_data: Optional[str] = None, + **kwargs + ): + super(VirtualMachineSshCredentials, self).__init__(**kwargs) + self.username = username + self.password = password + self.public_key_data = public_key_data + self.private_key_data = private_key_data + + +class Workspace(Resource): + """An object that represents a machine learning workspace. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Specifies the resource ID. + :vartype id: str + :ivar name: Specifies the name of the resource. + :vartype name: str + :param location: Specifies the location of the resource. + :type location: str + :ivar type: Specifies the type of the resource. + :vartype type: str + :param tags: A set of tags. Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :param type_identity_type: The identity type. Possible values include: "SystemAssigned", + "UserAssigned", "SystemAssigned,UserAssigned", "None". + :type type_identity_type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType + :param user_assigned_identities: The list of user identities associated with resource. The user + identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + :type user_assigned_identities: dict[str, + ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + :ivar workspace_id: The immutable id associated with this workspace. + :vartype workspace_id: str + :param description: The description of this workspace. + :type description: str + :param friendly_name: The friendly name for this workspace. This name in mutable. + :type friendly_name: str + :ivar creation_time: The creation time of the machine learning workspace in ISO8601 format. + :vartype creation_time: ~datetime.datetime + :param key_vault: ARM id of the key vault associated with this workspace. 
This cannot be
+ changed once the workspace has been created.
+ :type key_vault: str
+ :param application_insights: ARM id of the application insights associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type application_insights: str
+ :param container_registry: ARM id of the container registry associated with this workspace.
+ This cannot be changed once the workspace has been created.
+ :type container_registry: str
+ :param storage_account: ARM id of the storage account associated with this workspace. This
+ cannot be changed once the workspace has been created.
+ :type storage_account: str
+ :param discovery_url: URL of the discovery service used to identify regional endpoints for
+ machine learning experimentation services.
+ :type discovery_url: str
+ :ivar provisioning_state: The current deployment state of the workspace resource. The
+ provisioningState indicates the state of resource provisioning. Possible values include:
+ "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled".
+ :vartype provisioning_state: str or ~azure_machine_learning_workspaces.models.ProvisioningState
+ :param hbi_workspace: The flag that marks the workspace as containing high business impact
+ (HBI) data and reduces the diagnostic data collected by the service.
+ :type hbi_workspace: bool
+ :ivar service_provisioned_resource_group: The name of the managed resource group created by
+ the workspace RP in the customer subscription if the workspace is a CMK workspace.
+ :vartype service_provisioned_resource_group: str
+ :ivar private_link_count: Count of private connections in the workspace.
+ :vartype private_link_count: int
+ :param image_build_compute: The compute name used for image builds.
+ :type image_build_compute: str
+ :param allow_public_access_when_behind_vnet: The flag that indicates whether to allow public
+ access when the workspace is behind a VNet.
+ :type allow_public_access_when_behind_vnet: bool
+ :ivar private_endpoint_connections: The list of private endpoint connections in the workspace.
+ :vartype private_endpoint_connections:
+ list[~azure_machine_learning_workspaces.models.PrivateEndpointConnection]
+ :param shared_private_link_resources: The list of shared private link resources in this
+ workspace.
+ :type shared_private_link_resources:
+ list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource]
+ :ivar notebook_info: The notebook info of the Azure ML workspace.
+ :vartype notebook_info: ~azure_machine_learning_workspaces.models.NotebookResourceInfo
+ :param status: Indicates whether encryption is enabled for the workspace. Possible
+ values include: "Enabled", "Disabled".
+ :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus
+ :param key_vault_properties: Customer Key Vault properties.
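+
+ A minimal construction sketch for illustration (not generated code; the ARM resource ids
+ below are placeholders)::
+
+     ws = Workspace(
+         location="eastus",
+         friendly_name="demo workspace",
+         key_vault="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<kv>",
+         application_insights="/subscriptions/<sub>/resourceGroups/<rg>/providers/microsoft.insights/components/<ai>",
+         storage_account="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<sa>",
+     )
+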
+ :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + 'workspace_id': {'readonly': True}, + 'creation_time': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'service_provisioned_resource_group': {'readonly': True}, + 'private_link_count': {'readonly': True}, + 'private_endpoint_connections': {'readonly': True}, + 'notebook_info': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'principal_id': {'key': 'identity.principalId', 'type': 'str'}, + 'tenant_id': {'key': 'identity.tenantId', 'type': 'str'}, + 'type_identity_type': {'key': 'identity.type', 'type': 'str'}, + 'user_assigned_identities': {'key': 'identity.userAssignedIdentities', 'type': '{ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties}'}, + 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, + 'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'}, + 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, + 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, + 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, + 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'}, + 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'}, + 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'}, + 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, + 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, + 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, + 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, + 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'}, + 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, + 'status': {'key': 'properties.encryption.status', 'type': 'str'}, + 'key_vault_properties': {'key': 'properties.encryption.keyVaultProperties', 'type': 'KeyVaultProperties'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + sku: Optional["Sku"] = None, + type_identity_type: Optional[Union[str, "ResourceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, "ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] = None, + description: Optional[str] = None, + friendly_name: Optional[str] = None, + key_vault: Optional[str] = None, + application_insights: Optional[str] = None, + container_registry: Optional[str] = None, + storage_account: Optional[str] = None, + discovery_url: 
Optional[str] = None, + hbi_workspace: Optional[bool] = False, + image_build_compute: Optional[str] = None, + allow_public_access_when_behind_vnet: Optional[bool] = False, + shared_private_link_resources: Optional[List["SharedPrivateLinkResource"]] = None, + status: Optional[Union[str, "EncryptionStatus"]] = None, + key_vault_properties: Optional["KeyVaultProperties"] = None, + **kwargs + ): + super(Workspace, self).__init__(location=location, tags=tags, sku=sku, type_identity_type=type_identity_type, user_assigned_identities=user_assigned_identities, **kwargs) + self.workspace_id = None + self.description = description + self.friendly_name = friendly_name + self.creation_time = None + self.key_vault = key_vault + self.application_insights = application_insights + self.container_registry = container_registry + self.storage_account = storage_account + self.discovery_url = discovery_url + self.provisioning_state = None + self.hbi_workspace = hbi_workspace + self.service_provisioned_resource_group = None + self.private_link_count = None + self.image_build_compute = image_build_compute + self.allow_public_access_when_behind_vnet = allow_public_access_when_behind_vnet + self.private_endpoint_connections = None + self.shared_private_link_resources = shared_private_link_resources + self.notebook_info = None + self.status = status + self.key_vault_properties = key_vault_properties + + +class WorkspaceConnection(msrest.serialization.Model): + """Workspace connection. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: ResourceId of the workspace connection. + :vartype id: str + :ivar name: Friendly name of the workspace connection. + :vartype name: str + :ivar type: Resource type of workspace connection. + :vartype type: str + :param category: Category of the workspace connection. + :type category: str + :param target: Target of the workspace connection. + :type target: str + :param auth_type: Authorization type of the workspace connection. + :type auth_type: str + :param value: Value details of the workspace connection. + :type value: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'category': {'key': 'properties.category', 'type': 'str'}, + 'target': {'key': 'properties.target', 'type': 'str'}, + 'auth_type': {'key': 'properties.authType', 'type': 'str'}, + 'value': {'key': 'properties.value', 'type': 'str'}, + } + + def __init__( + self, + *, + category: Optional[str] = None, + target: Optional[str] = None, + auth_type: Optional[str] = None, + value: Optional[str] = None, + **kwargs + ): + super(WorkspaceConnection, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.category = category + self.target = target + self.auth_type = auth_type + self.value = value + + +class WorkspaceConnectionDto(msrest.serialization.Model): + """object used for creating workspace connection. + + :param name: Friendly name of the workspace connection. + :type name: str + :param category: Category of the workspace connection. + :type category: str + :param target: Target of the workspace connection. + :type target: str + :param auth_type: Authorization type of the workspace connection. + :type auth_type: str + :param value: Value details of the workspace connection. 
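+
+ A construction sketch for illustration (not generated code; the category, auth_type and
+ value strings are placeholders)::
+
+     dto = WorkspaceConnectionDto(
+         name="my-connection",
+         category="ACR",
+         target="https://myregistry.azurecr.io",
+         auth_type="PAT",
+         value="<secret-token>",
+     )
+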
+ :type value: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'category': {'key': 'properties.category', 'type': 'str'}, + 'target': {'key': 'properties.target', 'type': 'str'}, + 'auth_type': {'key': 'properties.authType', 'type': 'str'}, + 'value': {'key': 'properties.value', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + category: Optional[str] = None, + target: Optional[str] = None, + auth_type: Optional[str] = None, + value: Optional[str] = None, + **kwargs + ): + super(WorkspaceConnectionDto, self).__init__(**kwargs) + self.name = name + self.category = category + self.target = target + self.auth_type = auth_type + self.value = value + + +class WorkspaceListResult(msrest.serialization.Model): + """The result of a request to list machine learning workspaces. + + :param value: The list of machine learning workspaces. Since this list may be incomplete, the + nextLink field should be used to request the next list of machine learning workspaces. + :type value: list[~azure_machine_learning_workspaces.models.Workspace] + :param next_link: The URI that can be used to request the next list of machine learning + workspaces. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Workspace]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["Workspace"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(WorkspaceListResult, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class WorkspaceSku(msrest.serialization.Model): + """AML workspace sku information. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar resource_type: + :vartype resource_type: str + :ivar skus: The list of workspace sku settings. + :vartype skus: list[~azure_machine_learning_workspaces.models.SkuSettings] + """ + + _validation = { + 'resource_type': {'readonly': True}, + 'skus': {'readonly': True}, + } + + _attribute_map = { + 'resource_type': {'key': 'resourceType', 'type': 'str'}, + 'skus': {'key': 'skus', 'type': '[SkuSettings]'}, + } + + def __init__( + self, + **kwargs + ): + super(WorkspaceSku, self).__init__(**kwargs) + self.resource_type = None + self.skus = None + + +class WorkspaceUpdateParameters(msrest.serialization.Model): + """The parameters for updating a machine learning workspace. + + :param tags: A set of tags. The resource tags for the machine learning workspace. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :param description: The description of this workspace. + :type description: str + :param friendly_name: The friendly name for this workspace. 
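+
+ A construction sketch for illustration (not generated code)::
+
+     params = WorkspaceUpdateParameters(
+         tags={"team": "ml"},
+         description="Updated description",
+         friendly_name="Renamed workspace",
+     )
+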
+ :type friendly_name: str + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'sku': {'key': 'sku', 'type': 'Sku'}, + 'description': {'key': 'properties.description', 'type': 'str'}, + 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + sku: Optional["Sku"] = None, + description: Optional[str] = None, + friendly_name: Optional[str] = None, + **kwargs + ): + super(WorkspaceUpdateParameters, self).__init__(**kwargs) + self.tags = tags + self.sku = sku + self.description = description + self.friendly_name = friendly_name diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py new file mode 100644 index 00000000000..92cb2cd6019 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/__init__.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._operation_operations import OperationOperations +from ._workspace_operations import WorkspaceOperations +from ._workspace_feature_operations import WorkspaceFeatureOperations +from ._notebook_operations import NotebookOperations +from ._usage_operations import UsageOperations +from ._virtual_machine_size_operations import VirtualMachineSizeOperations +from ._quota_operations import QuotaOperations +from ._workspace_connection_operations import WorkspaceConnectionOperations +from ._machine_learning_compute_operations import MachineLearningComputeOperations +from ._azure_machine_learning_workspaces_operations import AzureMachineLearningWorkspacesOperationsMixin +from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations +from ._private_link_resource_operations import PrivateLinkResourceOperations + +__all__ = [ + 'OperationOperations', + 'WorkspaceOperations', + 'WorkspaceFeatureOperations', + 'NotebookOperations', + 'UsageOperations', + 'VirtualMachineSizeOperations', + 'QuotaOperations', + 'WorkspaceConnectionOperations', + 'MachineLearningComputeOperations', + 'AzureMachineLearningWorkspacesOperationsMixin', + 'PrivateEndpointConnectionOperations', + 'PrivateLinkResourceOperations', +] diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py new file mode 100644 index 00000000000..3d41790296c --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_azure_machine_learning_workspaces_operations.py @@ -0,0 +1,91 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class AzureMachineLearningWorkspacesOperationsMixin(object): + + def list_sku( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.SkuListResult"] + """Lists all skus with associated features. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either SkuListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.SkuListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.SkuListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_sku.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('SkuListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_sku.metadata = {'url': 
'/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces/skus'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py new file mode 100644 index 00000000000..9b7bbaa9115 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_machine_learning_compute_operations.py @@ -0,0 +1,893 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class MachineLearningComputeOperations(object): + """MachineLearningComputeOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_workspace( + self, + resource_group_name, # type: str + workspace_name, # type: str + skiptoken=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.PaginatedComputeResourcesList"] + """Gets computes in specified workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param skiptoken: Continuation token for pagination. 
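+
+ The returned pager can be iterated directly; continuation (including ``$skiptoken``) is
+ handled for you. Illustrative usage (resource names are placeholders and the
+ ``machine_learning_compute`` attribute on the client is assumed)::
+
+     for compute in client.machine_learning_compute.list_by_workspace("my-rg", "my-ws"):
+         # each item is a ComputeResource; properties may be None for some entries
+         print(compute.name, compute.properties.compute_type if compute.properties else None)
+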
+ :type skiptoken: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PaginatedComputeResourcesList or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedComputeResourcesList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedComputeResourcesList"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if skiptoken is not None: + query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PaginatedComputeResourcesList', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.ComputeResource" + """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are + not returned - use 'keys' nested resource to get them. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
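+
+ Illustrative usage (placeholder names; the ``machine_learning_compute`` attribute on the
+ client is assumed)::
+
+     resource = client.machine_learning_compute.get("my-rg", "my-ws", "cpu-cluster")
+     if resource.properties is not None:
+         print(resource.properties.provisioning_state)
+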
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeResource, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.ComputeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + location=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + sku=None, # type: Optional["models.Sku"] + type=None, # type: Optional[Union[str, "models.ResourceIdentityType"]] + user_assigned_identities=None, # type: Optional[Dict[str, "models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] + properties=None, # type: Optional["models.Compute"] + **kwargs # type: Any + ): + # type: (...) 
-> "models.ComputeResource" + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.ComputeResource(location=location, tags=tags, sku=sku, type_identity_type=type, user_assigned_identities=user_assigned_identities, properties=properties) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'ComputeResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if response.status_code == 201: + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + def begin_create_or_update( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + location=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + sku=None, # type: Optional["models.Sku"] + type=None, # type: Optional[Union[str, "models.ResourceIdentityType"]] + user_assigned_identities=None, # type: Optional[Dict[str, "models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] + properties=None, # type: Optional["models.Compute"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.ComputeResource"] + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. 
If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :param type: The identity type. + :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType + :param user_assigned_identities: The list of user identities associated with resource. The user + identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + :type user_assigned_identities: dict[str, ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + :param properties: Compute properties. + :type properties: ~azure_machine_learning_workspaces.models.Compute + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
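+
+ Illustrative usage (a sketch only, not generated documentation): the import path reflects
+ the vendored location in this patch, the client constructor is assumed to be the usual
+ ``(credential, subscription_id)``, and the ``machine_learning_compute`` attribute and the
+ AmlCompute/AmlComputeProperties/ScaleSettings model shapes are assumptions not shown in
+ this file::
+
+     from azure.identity import DefaultAzureCredential
+     from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
+         AzureMachineLearningWorkspaces,
+         models,
+     )
+
+     client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")
+     poller = client.machine_learning_compute.begin_create_or_update(
+         resource_group_name="my-rg",
+         workspace_name="my-ws",
+         compute_name="cpu-cluster",
+         location="eastus",
+         properties=models.AmlCompute(
+             properties=models.AmlComputeProperties(
+                 vm_size="STANDARD_DS3_V2",
+                 scale_settings=models.ScaleSettings(max_node_count=4, min_node_count=0),
+             ),
+         ),
+     )
+     compute = poller.result()  # block until the long-running operation completes
+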
+ :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + location=location, + tags=tags, + sku=sku, + type=type, + user_assigned_identities=user_assigned_identities, + properties=properties, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + def _update_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + scale_settings=None, # type: Optional["models.ScaleSettings"] + **kwargs # type: Any + ): + # type: (...) 
-> "models.ComputeResource" + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.ClusterUpdateParameters(scale_settings=scale_settings) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self._update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'ClusterUpdateParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + def begin_update( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + scale_settings=None, # type: Optional["models.ScaleSettings"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.ComputeResource"] + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param scale_settings: Desired scale settings for the amlCompute. + :type scale_settings: ~azure_machine_learning_workspaces.models.ScaleSettings + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either ComputeResource or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeResource"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + scale_settings=scale_settings, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('ComputeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + underlying_resource_action, # type: Union[str, "models.UnderlyingResourceAction"] + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + query_parameters['underlyingResourceAction'] = self._serialize.query("underlying_resource_action", underlying_resource_action, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) + response_headers['Location']=self._deserialize('str', response.headers.get('Location')) + + if cls: + return cls(pipeline_response, None, response_headers) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + underlying_resource_action, # type: Union[str, "models.UnderlyingResourceAction"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes specified Machine Learning compute. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. + :type compute_name: str + :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the + underlying compute from workspace if 'Detach'. + :type underlying_resource_action: str or ~azure_machine_learning_workspaces.models.UnderlyingResourceAction + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
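+
+ Illustrative usage (a sketch only; the ``machine_learning_compute`` attribute on the client
+ is assumed, and the resource names are placeholders)::
+
+     # "Detach" keeps the underlying compute resource; "Delete" removes it as well.
+     poller = client.machine_learning_compute.begin_delete(
+         resource_group_name="my-rg",
+         workspace_name="my-ws",
+         compute_name="cpu-cluster",
+         underlying_resource_action="Detach",
+     )
+     poller.wait()
+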
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + compute_name=compute_name, + underlying_resource_action=underlying_resource_action, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}'} # type: ignore + + def list_node( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.AmlComputeNodesInformation" + """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
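+
+ Illustrative usage (placeholder names; the ``machine_learning_compute`` attribute and the
+ ``nodes`` attribute of the result are assumptions not shown in this file)::
+
+     info = client.machine_learning_compute.list_node("my-rg", "my-ws", "cpu-cluster")
+     for node in info.nodes or []:
+         print(node.public_ip_address, node.port, node.node_state)
+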
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AmlComputeNodesInformation, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.AmlComputeNodesInformation + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.AmlComputeNodesInformation"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.list_node.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('AmlComputeNodesInformation', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_node.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes'} # type: ignore + + def list_key( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.ComputeSecrets" + """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComputeSecrets, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.ComputeSecrets + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ComputeSecrets"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.list_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ComputeSecrets', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys'} # type: ignore + + def start( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Posts a start action to a compute instance. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.start.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start'} # type: ignore + + def stop( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Posts a stop action to a compute instance. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.stop.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop'} # type: ignore + + def restart( + self, + resource_group_name, # type: str + workspace_name, # type: str + compute_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Posts a restart action to a compute instance. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. 
+ :type compute_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.restart.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'computeName': self._serialize.url("compute_name", compute_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebook_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebook_operations.py new file mode 100644 index 00000000000..a249c95e1df --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_notebook_operations.py @@ -0,0 +1,157 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class NotebookOperations(object): + """NotebookOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def _prepare_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Optional["models.NotebookResourceInfo"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.NotebookResourceInfo"]] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self._prepare_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _prepare_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore + + def begin_prepare( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.NotebookResourceInfo"] + """prepare. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either NotebookResourceInfo or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.NotebookResourceInfo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.NotebookResourceInfo"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._prepare_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_prepare.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operation_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operation_operations.py new file mode 100644 index 00000000000..3c812bdd26f --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_operation_operations.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
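# --- Illustrative usage sketch (not part of the generated patch) -------------
# A minimal example of how a caller might drive the long-running operations
# defined above: MachineLearningComputeOperations.begin_delete and
# NotebookOperations.begin_prepare. The import path, the client attribute names
# (machine_learning_compute, notebook), the use of azure-identity, and every
# resource name below are assumptions for illustration only.
from azure.identity import DefaultAzureCredential

from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,
)

client = AzureMachineLearningWorkspaces(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# begin_delete returns LROPoller[None]; 'Detach' removes the compute from the
# workspace while leaving the underlying Azure resource in place.
delete_poller = client.machine_learning_compute.begin_delete(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    compute_name="my-compute",
    underlying_resource_action="Detach",
)
delete_poller.wait()

# begin_prepare returns LROPoller[NotebookResourceInfo]; result() blocks until
# polling completes and returns the deserialized model.
notebook_info = client.notebook.begin_prepare(
    resource_group_name="my-rg",
    workspace_name="my-ws",
).result()
# -----------------------------------------------------------------------------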
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class OperationOperations(object): + """OperationOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.OperationListResult"] + """Lists all of the available Azure Machine Learning Workspaces REST API operations. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either OperationListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.OperationListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('OperationListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.MachineLearningServices/operations'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connection_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connection_operations.py new file mode 100644 index 00000000000..e3af58654bb --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_endpoint_connection_operations.py @@ -0,0 +1,294 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateEndpointConnectionOperations(object): + """PrivateEndpointConnectionOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.PrivateEndpointConnection" + """Gets the specified private endpoint connection associated with the workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the workspace. 
+ :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def put( + self, + resource_group_name, # type: str + workspace_name, # type: str + private_endpoint_connection_name, # type: str + private_endpoint=None, # type: Optional["models.PrivateEndpoint"] + private_link_service_connection_state=None, # type: Optional["models.PrivateLinkServiceConnectionState"] + **kwargs # type: Any + ): + # type: (...) -> "models.PrivateEndpointConnection" + """Update the state of specified private endpoint connection associated with the workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the workspace. + :type private_endpoint_connection_name: str + :param private_endpoint: The resource of private end point. + :type private_endpoint: ~azure_machine_learning_workspaces.models.PrivateEndpoint + :param private_link_service_connection_state: A collection of information about the state of + the connection between service consumer and provider. 
+ :type private_link_service_connection_state: ~azure_machine_learning_workspaces.models.PrivateLinkServiceConnectionState + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.PrivateEndpointConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateEndpointConnection"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _properties = models.PrivateEndpointConnection(private_endpoint=private_endpoint, private_link_service_connection_state=private_link_service_connection_state) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.put.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_properties, 'PrivateEndpointConnection') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + put.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + private_endpoint_connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes the specified private endpoint connection associated with the workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the workspace. + :type private_endpoint_connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resource_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resource_operations.py new file mode 100644 index 00000000000..e1028654952 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_private_link_resource_operations.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class PrivateLinkResourceOperations(object): + """PrivateLinkResourceOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. 
+ + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_workspace( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.PrivateLinkResourceListResult" + """Gets the private link resources that need to be created for a workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateLinkResourceListResult, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.PrivateLinkResourceListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PrivateLinkResourceListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.list_by_workspace.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('PrivateLinkResourceListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quota_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quota_operations.py new file mode 100644 index 00000000000..f4512072874 --- /dev/null +++ 
b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_quota_operations.py @@ -0,0 +1,178 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class QuotaOperations(object): + """QuotaOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def update( + self, + location, # type: str + value=None, # type: Optional[List["models.QuotaBaseProperties"]] + **kwargs # type: Any + ): + # type: (...) -> "models.UpdateWorkspaceQuotasResult" + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. + :type location: str + :param value: The list for update quota. 
+ :type value: list[~azure_machine_learning_workspaces.models.QuotaBaseProperties] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UpdateWorkspaceQuotasResult, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.UpdateWorkspaceQuotasResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.UpdateWorkspaceQuotasResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.QuotaUpdateParameters(value=value) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'QuotaUpdateParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas'} # type: ignore + + def list( + self, + location, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ListWorkspaceQuotas"] + """Gets the currently assigned Workspace Quotas based on VMFamily. + + :param location: The location for which resource usage is queried. 
+ :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListWorkspaceQuotas] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceQuotas"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ListWorkspaceQuotas', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/Quotas'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usage_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usage_operations.py new file mode 100644 index 00000000000..10042c0847c --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_usage_operations.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
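# --- Illustrative usage sketch (not part of the generated patch) -------------
# A minimal example of how the QuotaOperations group above might be used to
# enumerate and update per-VM-family quotas. Client construction, the 'quota'
# attribute name, and the QuotaBaseProperties field names/values shown are
# assumptions for illustration only.
from azure.identity import DefaultAzureCredential

from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,
    models,
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# list() returns an ItemPaged over ListWorkspaceQuotas.value; the pager follows
# next_link automatically.
for quota in client.quota.list(location="westus2"):
    print(quota)

# update() posts a QuotaUpdateParameters body built from the 'value' list and
# returns an UpdateWorkspaceQuotasResult.
result = client.quota.update(
    location="westus2",
    value=[
        models.QuotaBaseProperties(
            id="<quota-resource-id>",
            type="Microsoft.MachineLearningServices/workspaces/quotas",
            limit=100,
            unit="Count",
        )
    ],
)
# -----------------------------------------------------------------------------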
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class UsageOperations(object): + """UsageOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + location, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ListUsagesResult"] + """Gets the current usage information as well as limits for AML resources for given subscription + and location. + + :param location: The location for which resource usage is queried. 
+ :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListUsagesResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListUsagesResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListUsagesResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ListUsagesResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_size_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_size_operations.py new file mode 100644 index 00000000000..3eaf3e28a56 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_virtual_machine_size_operations.py @@ -0,0 +1,97 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
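# --- Illustrative usage sketch (not part of the generated patch) -------------
# A minimal example of paging through UsageOperations.list, defined above, to
# inspect current usage and limits for a location. Client construction and the
# 'usage' attribute name are assumptions for illustration only.
from azure.identity import DefaultAzureCredential

from azext_machinelearningservices.vendored_sdks.machinelearningservices import (
    AzureMachineLearningWorkspaces,
)

client = AzureMachineLearningWorkspaces(DefaultAzureCredential(), "<subscription-id>")

# Each iteration yields an element of ListUsagesResult.value; paging across
# next_link is handled transparently by ItemPaged.
for usage in client.usage.list(location="westus2"):
    print(usage)
# -----------------------------------------------------------------------------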
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class VirtualMachineSizeOperations(object): + """VirtualMachineSizeOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + location, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.VirtualMachineSizeListResult" + """Returns supported VM Sizes in a location. + + :param location: The location upon which virtual-machine-sizes is queried. + :type location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: VirtualMachineSizeListResult, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.VirtualMachineSizeListResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualMachineSizeListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'location': self._serialize.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = 
{'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connection_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connection_operations.py new file mode 100644 index 00000000000..79b16d50a70 --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_connection_operations.py @@ -0,0 +1,331 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class WorkspaceConnectionOperations(object): + """WorkspaceConnectionOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + workspace_name, # type: str + target=None, # type: Optional[str] + category=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.PaginatedWorkspaceConnectionsList"] + """List all connections under an AML workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param target: Target of the workspace connection. + :type target: str + :param category: Category of the workspace connection.
+ :type category: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either PaginatedWorkspaceConnectionsList or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.PaginatedWorkspaceConnectionsList] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.PaginatedWorkspaceConnectionsList"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if target is not None: + query_parameters['target'] = self._serialize.query("target", target, 'str') + if category is not None: + query_parameters['category'] = self._serialize.query("category", category, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('PaginatedWorkspaceConnectionsList', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections'} # type: ignore + + def create( + self, + resource_group_name, # type: str + workspace_name, # type: str + connection_name, # type: str + name=None, # type: Optional[str] + category=None, # type: Optional[str] + target=None, # type: Optional[str] + auth_type=None, # type: Optional[str] + value=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.WorkspaceConnection" + """Add a new workspace connection. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. + :type connection_name: str + :param name: Friendly name of the workspace connection. + :type name: str + :param category: Category of the workspace connection. + :type category: str + :param target: Target of the workspace connection. + :type target: str + :param auth_type: Authorization type of the workspace connection. + :type auth_type: str + :param value: Value details of the workspace connection. + :type value: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnection, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.WorkspaceConnectionDto(name=name, category=category, target=target, auth_type=auth_type, value=value) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'WorkspaceConnectionDto') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('WorkspaceConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.WorkspaceConnection" + """Get the detail of a workspace connection. + + :param resource_group_name: Name of the resource group in which workspace is located. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. + :type connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnection, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.WorkspaceConnection + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceConnection"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('WorkspaceConnection', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + connection_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Delete a workspace connection. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. 
+ :type connection_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + 'connectionName': self._serialize.url("connection_name", connection_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_feature_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_feature_operations.py new file mode 100644 index 00000000000..82c6af2fb4b --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_feature_operations.py @@ -0,0 +1,119 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class WorkspaceFeatureOperations(object): + """WorkspaceFeatureOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.ListAmlUserFeatureResult"] + """Lists all enabled features for a workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListAmlUserFeatureResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.ListAmlUserFeatureResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListAmlUserFeatureResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ListAmlUserFeatureResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, 
**kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_operations.py b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_operations.py new file mode 100644 index 00000000000..f64f07ed0da --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/operations/_workspace_operations.py @@ -0,0 +1,760 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class WorkspaceOperations(object): + """WorkspaceOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure_machine_learning_workspaces.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def get( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.Workspace" + """Gets the properties of the specified machine learning workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + location=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + sku=None, # type: Optional["models.Sku"] + type=None, # type: Optional[Union[str, "models.ResourceIdentityType"]] + user_assigned_identities=None, # type: Optional[Dict[str, "models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] + description=None, # type: Optional[str] + friendly_name=None, # type: Optional[str] + key_vault=None, # type: Optional[str] + application_insights=None, # type: Optional[str] + container_registry=None, # type: Optional[str] + storage_account=None, # type: Optional[str] + discovery_url=None, # type: Optional[str] + hbi_workspace=False, # type: Optional[bool] + image_build_compute=None, # type: Optional[str] + allow_public_access_when_behind_vnet=False, # type: Optional[bool] + shared_private_link_resources=None, # type: Optional[List["models.SharedPrivateLinkResource"]] + status=None, # type: Optional[Union[str, "models.EncryptionStatus"]] + key_vault_properties=None, # type: Optional["models.KeyVaultProperties"] + **kwargs # type: Any + ): + # type: (...) 
-> Optional["models.Workspace"] + cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Workspace"]] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.Workspace(location=location, tags=tags, sku=sku, type_identity_type=type, user_assigned_identities=user_assigned_identities, description=description, friendly_name=friendly_name, key_vault=key_vault, application_insights=application_insights, container_registry=container_registry, storage_account=storage_account, discovery_url=discovery_url, hbi_workspace=hbi_workspace, image_build_compute=image_build_compute, allow_public_access_when_behind_vnet=allow_public_access_when_behind_vnet, shared_private_link_resources=shared_private_link_resources, status=status, key_vault_properties=key_vault_properties) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self._create_or_update_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'Workspace') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('Workspace', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + def begin_create_or_update( + self, + resource_group_name, # type: str + workspace_name, # type: str + location=None, # type: Optional[str] + tags=None, # type: Optional[Dict[str, str]] + sku=None, # type: Optional["models.Sku"] + type=None, # type: Optional[Union[str, "models.ResourceIdentityType"]] + user_assigned_identities=None, # type: Optional[Dict[str, "models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties"]] + description=None, # type: Optional[str] + friendly_name=None, # type: 
Optional[str] + key_vault=None, # type: Optional[str] + application_insights=None, # type: Optional[str] + container_registry=None, # type: Optional[str] + storage_account=None, # type: Optional[str] + discovery_url=None, # type: Optional[str] + hbi_workspace=False, # type: Optional[bool] + image_build_compute=None, # type: Optional[str] + allow_public_access_when_behind_vnet=False, # type: Optional[bool] + shared_private_link_resources=None, # type: Optional[List["models.SharedPrivateLinkResource"]] + status=None, # type: Optional[Union[str, "models.EncryptionStatus"]] + key_vault_properties=None, # type: Optional["models.KeyVaultProperties"] + **kwargs # type: Any + ): + # type: (...) -> LROPoller["models.Workspace"] + """Creates or updates a workspace with the specified parameters. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param location: Specifies the location of the resource. + :type location: str + :param tags: Contains resource tags defined as key/value pairs. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :param type: The identity type. + :type type: str or ~azure_machine_learning_workspaces.models.ResourceIdentityType + :param user_assigned_identities: The list of user identities associated with resource. The user + identity dictionary key references will be ARM resource ids in the form: + '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. + :type user_assigned_identities: dict[str, ~azure_machine_learning_workspaces.models.ComponentsSgqdofSchemasIdentityPropertiesUserassignedidentitiesAdditionalproperties] + :param description: The description of this workspace. + :type description: str + :param friendly_name: The friendly name for this workspace. This name is mutable. + :type friendly_name: str + :param key_vault: ARM id of the key vault associated with this workspace. This cannot be + changed once the workspace has been created. + :type key_vault: str + :param application_insights: ARM id of the application insights associated with this workspace. + This cannot be changed once the workspace has been created. + :type application_insights: str + :param container_registry: ARM id of the container registry associated with this workspace. + This cannot be changed once the workspace has been created. + :type container_registry: str + :param storage_account: ARM id of the storage account associated with this workspace. This + cannot be changed once the workspace has been created. + :type storage_account: str + :param discovery_url: Url for the discovery service to identify regional endpoints for machine + learning experimentation services. + :type discovery_url: str + :param hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data + collected by the service. + :type hbi_workspace: bool + :param image_build_compute: The compute name for image build. + :type image_build_compute: str + :param allow_public_access_when_behind_vnet: The flag to indicate whether to allow public + access when behind VNet. + :type allow_public_access_when_behind_vnet: bool + :param shared_private_link_resources: The list of shared private link resources in this + workspace.
+ :type shared_private_link_resources: list[~azure_machine_learning_workspaces.models.SharedPrivateLinkResource] + :param status: Indicates whether or not the encryption is enabled for the workspace. + :type status: str or ~azure_machine_learning_workspaces.models.EncryptionStatus + :param key_vault_properties: Customer Key vault properties. + :type key_vault_properties: ~azure_machine_learning_workspaces.models.KeyVaultProperties + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure_machine_learning_workspaces.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + location=location, + tags=tags, + sku=sku, + type=type, + user_assigned_identities=user_assigned_identities, + description=description, + friendly_name=friendly_name, + key_vault=key_vault, + application_insights=application_insights, + container_registry=container_registry, + storage_account=storage_account, + discovery_url=discovery_url, + hbi_workspace=hbi_workspace, + image_build_compute=image_build_compute, + allow_public_access_when_behind_vnet=allow_public_access_when_behind_vnet, + shared_private_link_resources=shared_private_link_resources, + status=status, + key_vault_properties=key_vault_properties, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + def _delete_initial( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self._delete_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + def begin_delete( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller[None] + """Deletes a machine learning workspace. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + cls=lambda x,y,z: x, + **kwargs + ) + + kwargs.pop('error_map', None) + kwargs.pop('content_type', None) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output + ) + else: + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + workspace_name, # type: str + tags=None, # type: Optional[Dict[str, str]] + sku=None, # type: Optional["models.Sku"] + description=None, # type: Optional[str] + friendly_name=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.Workspace" + """Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. + :type workspace_name: str + :param tags: The resource tags for the machine learning workspace. + :type tags: dict[str, str] + :param sku: The sku of the workspace. + :type sku: ~azure_machine_learning_workspaces.models.Sku + :param description: The description of this workspace. + :type description: str + :param friendly_name: The friendly name for this workspace. 
+ :type friendly_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Workspace, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.Workspace + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.Workspace"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _parameters = models.WorkspaceUpdateParameters(tags=tags, sku=sku, description=description, friendly_name=friendly_name) + api_version = "2020-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_parameters, 'WorkspaceUpdateParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Workspace', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + skiptoken=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.WorkspaceListResult"] + """Lists all the available machine learning workspaces under the specified resource group. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param skiptoken: Continuation token for pagination. 
+ :type skiptoken: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if skiptoken is not None: + query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore + + def list_key( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.ListWorkspaceKeysResult" + """Lists all the keys associated with this workspace. This includes keys for the storage account, + app insights and password for container registry. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ListWorkspaceKeysResult, or the result of cls(response) + :rtype: ~azure_machine_learning_workspaces.models.ListWorkspaceKeysResult + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.ListWorkspaceKeysResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.list_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys'} # type: ignore + + def resync_key( + self, + resource_group_name, # type: str + workspace_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Resync all the keys associated with this workspace. This includes keys for the storage account, + app insights and password for container registry. + + :param resource_group_name: Name of the resource group in which workspace is located. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. 
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + # Construct URL + url = self.resync_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(models.MachineLearningServiceError, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + resync_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys'} # type: ignore + + def list_by_subscription( + self, + skiptoken=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.WorkspaceListResult"] + """Lists all the available machine learning workspaces under the specified subscription. + + :param skiptoken: Continuation token for pagination. 
+ :type skiptoken: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure_machine_learning_workspaces.models.WorkspaceListResult] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.WorkspaceListResult"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2020-06-01" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + if not next_link: + # Construct URL + url = self.list_by_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + if skiptoken is not None: + query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize(models.MachineLearningServiceError, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces'} # type: ignore diff --git a/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed new file mode 100644 index 00000000000..e5aff4f83af --- /dev/null +++ b/src/machinelearningservices/azext_machinelearningservices/vendored_sdks/machinelearningservices/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/src/machinelearningservices/report.md b/src/machinelearningservices/report.md new file mode 100644 index 00000000000..13a5da0f837 --- /dev/null +++ b/src/machinelearningservices/report.md @@ -0,0 +1,911 @@ +# Azure CLI Module Creation Report + +### machinelearningservices list-sku + +list-sku a machinelearningservices . 
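+
+As a quick orientation, an invocation of this command could look like the following; it assumes the extension is installed and that the group and command names match the generated tables in this report (the manual layer may alias them):
+
+```
+az machinelearningservices list-sku
+```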
+ +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices || + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|list-sku|ListSkus| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| + +### machinelearningservices machine-learning-compute aks create + +aks create a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|aks create|CreateOrUpdate#Create#AKS| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| +|**--location**|string|Specifies the location of the resource.|location|location| +|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags| +|**--sku**|object|The sku of the workspace.|sku|sku| +|**--identity-type**|sealed-choice|The identity type.|type|type| +|**--identity-user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities| +|**--compute-location**|string|Location for the underlying compute|ak_s_compute_location|computeLocation| +|**--description**|string|The description of the Machine Learning compute.|ak_s_description|description| +|**--resource-id**|string|ARM resource id of the underlying compute|ak_s_resource_id|resourceId| +|**--properties-properties**|object|AKS properties|ak_s_properties|properties| + +### machinelearningservices machine-learning-compute aml-compute create + +aml-compute create a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|aml-compute create|CreateOrUpdate#Create#AmlCompute| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| +|**--location**|string|Specifies the location of the resource.|location|location| +|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags| +|**--sku**|object|The sku of the workspace.|sku|sku| +|**--identity-type**|sealed-choice|The identity type.|type|type| +|**--identity-user-assigned-identities**|dictionary|The list of user identities associated with resource. 
The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities| +|**--compute-location**|string|Location for the underlying compute|aml_compute_compute_location|computeLocation| +|**--description**|string|The description of the Machine Learning compute.|aml_compute_description|description| +|**--resource-id**|string|ARM resource id of the underlying compute|aml_compute_resource_id|resourceId| +|**--properties-properties**|object|AML Compute properties|aml_compute_properties|properties| + +### machinelearningservices machine-learning-compute compute-instance create + +compute-instance create a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|compute-instance create|CreateOrUpdate#Create#ComputeInstance| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| +|**--location**|string|Specifies the location of the resource.|location|location| +|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags| +|**--sku**|object|The sku of the workspace.|sku|sku| +|**--identity-type**|sealed-choice|The identity type.|type|type| +|**--identity-user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities| +|**--compute-location**|string|Location for the underlying compute|compute_instance_compute_location|computeLocation| +|**--description**|string|The description of the Machine Learning compute.|compute_instance_description|description| +|**--resource-id**|string|ARM resource id of the underlying compute|compute_instance_resource_id|resourceId| +|**--vm-size**|string|Virtual Machine Size|compute_instance_vm_size|vmSize| +|**--application-sharing-policy**|choice|Policy for sharing applications on this compute instance among users of parent workspace. If Personal, only the creator can access applications on this compute instance. When Shared, any workspace user can access applications on this instance depending on his/her assigned role.|compute_instance_application_sharing_policy|applicationSharingPolicy| +|**--ssh-settings**|object|Specifies policy and settings for SSH access.|compute_instance_ssh_settings|sshSettings| +|**--subnet-id**|string|The ID of the resource|compute_instance_id|id| + +### machinelearningservices machine-learning-compute data-factory create + +data-factory create a machinelearningservices machine-learning-compute. 
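+
+A sketch of a possible call, using only options from the parameter table below; the resource group, workspace, compute name and location values are placeholders:
+
+```
+az machinelearningservices machine-learning-compute data-factory create \
+    --resource-group-name my-rg --workspace-name my-ws \
+    --compute-name my-adf-compute --location westus2
+```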
+ +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|data-factory create|CreateOrUpdate#Create#DataFactory| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| +|**--location**|string|Specifies the location of the resource.|location|location| +|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags| +|**--sku**|object|The sku of the workspace.|sku|sku| +|**--identity-type**|sealed-choice|The identity type.|type|type| +|**--identity-user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities| +|**--compute-location**|string|Location for the underlying compute|data_factory_compute_location|computeLocation| +|**--description**|string|The description of the Machine Learning compute.|data_factory_description|description| +|**--resource-id**|string|ARM resource id of the underlying compute|data_factory_resource_id|resourceId| + +### machinelearningservices machine-learning-compute data-lake-analytics create + +data-lake-analytics create a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|data-lake-analytics create|CreateOrUpdate#Create#DataLakeAnalytics| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| +|**--location**|string|Specifies the location of the resource.|location|location| +|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags| +|**--sku**|object|The sku of the workspace.|sku|sku| +|**--identity-type**|sealed-choice|The identity type.|type|type| +|**--identity-user-assigned-identities**|dictionary|The list of user identities associated with resource. 
The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities| +|**--compute-location**|string|Location for the underlying compute|data_lake_analytics_compute_location|computeLocation| +|**--description**|string|The description of the Machine Learning compute.|data_lake_analytics_description|description| +|**--resource-id**|string|ARM resource id of the underlying compute|data_lake_analytics_resource_id|resourceId| +|**--data-lake-store-account-name**|string|DataLake Store Account Name|data_lake_analytics_data_lake_store_account_name|dataLakeStoreAccountName| + +### machinelearningservices machine-learning-compute databricks create + +databricks create a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|databricks create|CreateOrUpdate#Create#Databricks| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| +|**--location**|string|Specifies the location of the resource.|location|location| +|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags| +|**--sku**|object|The sku of the workspace.|sku|sku| +|**--identity-type**|sealed-choice|The identity type.|type|type| +|**--identity-user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities| +|**--compute-location**|string|Location for the underlying compute|databricks_compute_location|computeLocation| +|**--description**|string|The description of the Machine Learning compute.|databricks_description|description| +|**--resource-id**|string|ARM resource id of the underlying compute|databricks_resource_id|resourceId| +|**--databricks-access-token**|string|Databricks access token|databricks_databricks_access_token|databricksAccessToken| + +### machinelearningservices machine-learning-compute delete + +delete a machinelearningservices machine-learning-compute. 
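+
+An illustrative call with placeholder names; `Detach` is one of the two choices documented for `--underlying-resource-action` below:
+
+```
+az machinelearningservices machine-learning-compute delete \
+    --resource-group-name my-rg --workspace-name my-ws \
+    --compute-name my-compute --underlying-resource-action Detach
+```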
+ +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|delete|Delete| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| +|**--underlying-resource-action**|choice|Delete the underlying compute if 'Delete', or detach the underlying compute from workspace if 'Detach'.|underlying_resource_action|underlyingResourceAction| + +### machinelearningservices machine-learning-compute hd-insight create + +hd-insight create a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|hd-insight create|CreateOrUpdate#Create#HDInsight| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| +|**--location**|string|Specifies the location of the resource.|location|location| +|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags| +|**--sku**|object|The sku of the workspace.|sku|sku| +|**--identity-type**|sealed-choice|The identity type.|type|type| +|**--identity-user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities| +|**--compute-location**|string|Location for the underlying compute|hd_insight_compute_location|computeLocation| +|**--description**|string|The description of the Machine Learning compute.|hd_insight_description|description| +|**--resource-id**|string|ARM resource id of the underlying compute|hd_insight_resource_id|resourceId| +|**--ssh-port**|integer|Port open for ssh connections on the master node of the cluster.|hd_insight_ssh_port|sshPort| +|**--address**|string|Public IP address of the master node of the cluster.|hd_insight_address|address| +|**--administrator-account**|object|Admin credentials for master node of the cluster|hd_insight_administrator_account|administratorAccount| + +### machinelearningservices machine-learning-compute list + +list a machinelearningservices machine-learning-compute. 
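+
+For example, listing the compute targets of a workspace (placeholder names):
+
+```
+az machinelearningservices machine-learning-compute list \
+    --resource-group-name my-rg --workspace-name my-ws
+```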
+ +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|list|ListByWorkspace| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken| + +### machinelearningservices machine-learning-compute list-key + +list-key a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|list-key|ListKeys| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| + +### machinelearningservices machine-learning-compute list-node + +list-node a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|list-node|ListNodes| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| + +### machinelearningservices machine-learning-compute restart + +restart a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|restart|Restart| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| + +### machinelearningservices machine-learning-compute show + +show a machinelearningservices machine-learning-compute. 
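+
+A possible invocation with placeholder names:
+
+```
+az machinelearningservices machine-learning-compute show \
+    --resource-group-name my-rg --workspace-name my-ws --compute-name my-compute
+```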
+ +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|show|Get| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| + +### machinelearningservices machine-learning-compute start + +start a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|start|Start| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| + +### machinelearningservices machine-learning-compute stop + +stop a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|stop|Stop| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| + +### machinelearningservices machine-learning-compute update + +update a machinelearningservices machine-learning-compute. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|update|Update| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| +|**--scale-settings**|object|Desired scale settings for the amlCompute.|scale_settings|scaleSettings| + +### machinelearningservices machine-learning-compute virtual-machine create + +virtual-machine create a machinelearningservices machine-learning-compute. 
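+
+A minimal sketch that attaches an existing VM by its ARM resource id; every id and name below is a placeholder:
+
+```
+az machinelearningservices machine-learning-compute virtual-machine create \
+    --resource-group-name my-rg --workspace-name my-ws --compute-name my-vm-compute \
+    --location westus2 \
+    --resource-id "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/my-rg/providers/Microsoft.Compute/virtualMachines/my-vm"
+```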
+ +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices machine-learning-compute|MachineLearningCompute| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|virtual-machine create|CreateOrUpdate#Create#VirtualMachine| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--compute-name**|string|Name of the Azure Machine Learning compute.|compute_name|computeName| +|**--location**|string|Specifies the location of the resource.|location|location| +|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags| +|**--sku**|object|The sku of the workspace.|sku|sku| +|**--identity-type**|sealed-choice|The identity type.|type|type| +|**--identity-user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities| +|**--compute-location**|string|Location for the underlying compute|virtual_machine_compute_location|computeLocation| +|**--description**|string|The description of the Machine Learning compute.|virtual_machine_description|description| +|**--resource-id**|string|ARM resource id of the underlying compute|virtual_machine_resource_id|resourceId| +|**--virtual-machine-size**|string|Virtual Machine size|virtual_machine_virtual_machine_size|virtualMachineSize| +|**--ssh-port**|integer|Port open for ssh connections.|virtual_machine_ssh_port|sshPort| +|**--address**|string|Public IP address of the virtual machine.|virtual_machine_address|address| +|**--administrator-account**|object|Admin credentials for virtual machine|virtual_machine_administrator_account|administratorAccount| + +### machinelearningservices notebook prepare + +prepare a machinelearningservices notebook. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices notebook|Notebooks| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|prepare|Prepare| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| + +### machinelearningservices private-endpoint-connection delete + +delete a machinelearningservices private-endpoint-connection. 
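+
+For illustration (placeholder names):
+
+```
+az machinelearningservices private-endpoint-connection delete \
+    --resource-group-name my-rg --workspace-name my-ws \
+    --private-endpoint-connection-name my-pe-connection
+```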
+ +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices private-endpoint-connection|PrivateEndpointConnections| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|delete|Delete| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName| + +### machinelearningservices private-endpoint-connection put + +put a machinelearningservices private-endpoint-connection. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices private-endpoint-connection|PrivateEndpointConnections| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|put|Put| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName| +|**--private-link-service-connection-state**|object|A collection of information about the state of the connection between service consumer and provider.|private_link_service_connection_state|privateLinkServiceConnectionState| + +### machinelearningservices private-endpoint-connection show + +show a machinelearningservices private-endpoint-connection. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices private-endpoint-connection|PrivateEndpointConnections| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|show|Get| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--private-endpoint-connection-name**|string|The name of the private endpoint connection associated with the workspace|private_endpoint_connection_name|privateEndpointConnectionName| + +### machinelearningservices private-link-resource list + +list a machinelearningservices private-link-resource. 
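+
+A possible call with placeholder names:
+
+```
+az machinelearningservices private-link-resource list \
+    --resource-group-name my-rg --workspace-name my-ws
+```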
+
+#### Command group
+|Name (az)|Swagger name|
+|---------|------------|
+|machinelearningservices private-link-resource|PrivateLinkResources|
+
+#### Methods
+|Name (az)|Swagger name|
+|---------|------------|
+|list|ListByWorkspace|
+
+#### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+
+### machinelearningservices quota list
+
+list a machinelearningservices quota.
+
+#### Command group
+|Name (az)|Swagger name|
+|---------|------------|
+|machinelearningservices quota|Quotas|
+
+#### Methods
+|Name (az)|Swagger name|
+|---------|------------|
+|list|List|
+
+#### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+### machinelearningservices quota update
+
+update a machinelearningservices quota.
+
+#### Command group
+|Name (az)|Swagger name|
+|---------|------------|
+|machinelearningservices quota|Quotas|
+
+#### Methods
+|Name (az)|Swagger name|
+|---------|------------|
+|update|Update|
+
+#### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which the quota update is queried.|location|location|
+|**--value**|array|The list for update quota.|value|value|
+
+### machinelearningservices usage list
+
+list a machinelearningservices usage.
+
+#### Command group
+|Name (az)|Swagger name|
+|---------|------------|
+|machinelearningservices usage|Usages|
+
+#### Methods
+|Name (az)|Swagger name|
+|---------|------------|
+|list|List|
+
+#### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which resource usage is queried.|location|location|
+
+### machinelearningservices virtual-machine-size list
+
+list a machinelearningservices virtual-machine-size.
+
+#### Command group
+|Name (az)|Swagger name|
+|---------|------------|
+|machinelearningservices virtual-machine-size|VirtualMachineSizes|
+
+#### Methods
+|Name (az)|Swagger name|
+|---------|------------|
+|list|List|
+
+#### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--location**|string|The location for which virtual machine sizes are queried.|location|location|
+
+### machinelearningservices workspace create
+
+create a machinelearningservices workspace.
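+
+A minimal sketch using only the simple string options from the table below; the names and location are placeholders, and the dependent-resource ids (key vault, storage account, application insights, container registry) are omitted here:
+
+```
+az machinelearningservices workspace create \
+    --resource-group-name my-rg --workspace-name my-ws --location westus2 \
+    --friendly-name "My workspace" --description "Demo workspace"
+```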
+
+#### Command group
+|Name (az)|Swagger name|
+|---------|------------|
+|machinelearningservices workspace|Workspaces|
+
+#### Methods
+|Name (az)|Swagger name|
+|---------|------------|
+|create|CreateOrUpdate#Create|
+
+#### Parameters
+|Option|Type|Description|Path (SDK)|Swagger name|
+|------|----|-----------|----------|------------|
+|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName|
+|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName|
+|**--location**|string|Specifies the location of the resource.|location|location|
+|**--tags**|dictionary|Contains resource tags defined as key/value pairs.|tags|tags|
+|**--sku**|object|The sku of the workspace.|sku|sku|
+|**--identity-type**|sealed-choice|The identity type.|type|type|
+|**--identity-user-assigned-identities**|dictionary|The list of user identities associated with resource. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.|user_assigned_identities|userAssignedIdentities|
+|**--description**|string|The description of this workspace.|description|description|
+|**--friendly-name**|string|The friendly name for this workspace. This name is mutable|friendly_name|friendlyName|
+|**--key-vault**|string|ARM id of the key vault associated with this workspace. This cannot be changed once the workspace has been created|key_vault|keyVault|
+|**--application-insights**|string|ARM id of the application insights associated with this workspace. This cannot be changed once the workspace has been created|application_insights|applicationInsights|
+|**--container-registry**|string|ARM id of the container registry associated with this workspace. This cannot be changed once the workspace has been created|container_registry|containerRegistry|
+|**--storage-account**|string|ARM id of the storage account associated with this workspace. This cannot be changed once the workspace has been created|storage_account|storageAccount|
+|**--discovery-url**|string|Url for the discovery service to identify regional endpoints for machine learning experimentation services|discovery_url|discoveryUrl|
+|**--hbi-workspace**|boolean|The flag to signal HBI data in the workspace and reduce diagnostic data collected by the service|hbi_workspace|hbiWorkspace|
+|**--image-build-compute**|string|The compute name for image build|image_build_compute|imageBuildCompute|
+|**--allow-public-access-when-behind-vnet**|boolean|The flag to indicate whether to allow public access when behind VNet.|allow_public_access_when_behind_vnet|allowPublicAccessWhenBehindVnet|
+|**--shared-private-link-resources**|array|The list of shared private link resources in this workspace.|shared_private_link_resources|sharedPrivateLinkResources|
+|**--encryption-status**|choice|Indicates whether or not the encryption is enabled for the workspace.|status|status|
+|**--encryption-key-vault-properties**|object|Customer Key vault properties.|key_vault_properties|keyVaultProperties|
+
+### machinelearningservices workspace delete
+
+delete a machinelearningservices workspace.
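+
+For example (placeholder names):
+
+```
+az machinelearningservices workspace delete --resource-group-name my-rg --workspace-name my-ws
+```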
+ +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace|Workspaces| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|delete|Delete| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| + +### machinelearningservices workspace list + +list a machinelearningservices workspace. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace|Workspaces| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|list|ListByResourceGroup| +|list|ListBySubscription| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--skiptoken**|string|Continuation token for pagination.|skiptoken|$skiptoken| + +### machinelearningservices workspace list-key + +list-key a machinelearningservices workspace. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace|Workspaces| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|list-key|ListKeys| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| + +### machinelearningservices workspace resync-key + +resync-key a machinelearningservices workspace. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace|Workspaces| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|resync-key|ResyncKeys| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| + +### machinelearningservices workspace show + +show a machinelearningservices workspace. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace|Workspaces| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|show|Get| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| + +### machinelearningservices workspace update + +update a machinelearningservices workspace. 
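+
+An illustrative update of the mutable fields (placeholder values):
+
+```
+az machinelearningservices workspace update \
+    --resource-group-name my-rg --workspace-name my-ws \
+    --friendly-name "Renamed workspace" --description "Updated description"
+```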
+ +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace|Workspaces| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|update|Update| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--tags**|dictionary|The resource tags for the machine learning workspace.|tags|tags| +|**--sku**|object|The sku of the workspace.|sku|sku| +|**--description**|string|The description of this workspace.|description|description| +|**--friendly-name**|string|The friendly name for this workspace.|friendly_name|friendlyName| + +### machinelearningservices workspace-connection create + +create a machinelearningservices workspace-connection. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace-connection|WorkspaceConnections| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|create|Create| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName| +|**--name**|string|Friendly name of the workspace connection|name|name| +|**--category**|string|Category of the workspace connection.|category|category| +|**--target**|string|Target of the workspace connection.|target|target| +|**--auth-type**|string|Authorization type of the workspace connection.|auth_type|authType| +|**--value**|string|Value details of the workspace connection.|value|value| + +### machinelearningservices workspace-connection delete + +delete a machinelearningservices workspace-connection. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace-connection|WorkspaceConnections| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|delete|Delete| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName| + +### machinelearningservices workspace-connection list + +list a machinelearningservices workspace-connection. 
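+
+A possible call with placeholder names; results can also be narrowed with the documented `--target` and `--category` options:
+
+```
+az machinelearningservices workspace-connection list \
+    --resource-group-name my-rg --workspace-name my-ws
+```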
+ +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace-connection|WorkspaceConnections| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|list|List| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--target**|string|Target of the workspace connection.|target|target| +|**--category**|string|Category of the workspace connection.|category|category| + +### machinelearningservices workspace-connection show + +show a machinelearningservices workspace-connection. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace-connection|WorkspaceConnections| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|show|Get| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| +|**--connection-name**|string|Friendly name of the workspace connection|connection_name|connectionName| + +### machinelearningservices workspace-feature list + +list a machinelearningservices workspace-feature. + +#### Command group +|Name (az)|Swagger name| +|---------|------------| +|machinelearningservices workspace-feature|WorkspaceFeatures| + +#### Methods +|Name (az)|Swagger name| +|---------|------------| +|list|List| + +#### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|Name of the resource group in which workspace is located.|resource_group_name|resourceGroupName| +|**--workspace-name**|string|Name of Azure Machine Learning workspace.|workspace_name|workspaceName| diff --git a/src/machinelearningservices/setup.cfg b/src/machinelearningservices/setup.cfg new file mode 100644 index 00000000000..2fdd96e5d39 --- /dev/null +++ b/src/machinelearningservices/setup.cfg @@ -0,0 +1 @@ +#setup.cfg \ No newline at end of file diff --git a/src/machinelearningservices/setup.py b/src/machinelearningservices/setup.py new file mode 100644 index 00000000000..8243ef3bd06 --- /dev/null +++ b/src/machinelearningservices/setup.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + + +from codecs import open +from setuptools import setup, find_packages + +# HISTORY.rst entry. 
+VERSION = '0.1.0'
+try:
+    from azext_machinelearningservices.manual.version import VERSION
+except ImportError:
+    pass
+
+# The full list of classifiers is available at
+# https://pypi.python.org/pypi?%3Aaction=list_classifiers
+CLASSIFIERS = [
+    'Development Status :: 4 - Beta',
+    'Intended Audience :: Developers',
+    'Intended Audience :: System Administrators',
+    'Programming Language :: Python',
+    'Programming Language :: Python :: 3',
+    'Programming Language :: Python :: 3.6',
+    'Programming Language :: Python :: 3.7',
+    'Programming Language :: Python :: 3.8',
+    'License :: OSI Approved :: MIT License',
+]
+
+DEPENDENCIES = []
+try:
+    # Absolute import, mirroring the VERSION override above; a relative import
+    # would always fail when setup.py runs as a top-level script.
+    from azext_machinelearningservices.manual.dependency import DEPENDENCIES
+except ImportError:
+    pass
+
+with open('README.md', 'r', encoding='utf-8') as f:
+    README = f.read()
+with open('HISTORY.rst', 'r', encoding='utf-8') as f:
+    HISTORY = f.read()
+
+setup(
+    name='machinelearningservices',
+    version=VERSION,
+    description='Microsoft Azure Command-Line Tools AzureMachineLearningWorkspaces Extension',
+    author='Microsoft Corporation',
+    author_email='azpycli@microsoft.com',
+    url='https://github.com/Azure/azure-cli-extensions/tree/master/src/machinelearningservices',
+    long_description=README + '\n\n' + HISTORY,
+    license='MIT',
+    classifiers=CLASSIFIERS,
+    packages=find_packages(),
+    install_requires=DEPENDENCIES,
+    package_data={'azext_machinelearningservices': ['azext_metadata.json']},
+)