diff --git a/src/datafactory/HISTORY.rst b/src/datafactory/HISTORY.rst
index 3566e61f36a..1c139576ba0 100644
--- a/src/datafactory/HISTORY.rst
+++ b/src/datafactory/HISTORY.rst
@@ -3,10 +3,6 @@
 Release History
 ===============
 
-0.2.0
-++++++
-* add update command for linked services and triggers and datasets
-
 0.1.0
 ++++++
 * Initial release.
diff --git a/src/datafactory/azext_datafactory/azext_metadata.json b/src/datafactory/azext_datafactory/azext_metadata.json
index 4f48fa652a5..cfc30c747c7 100644
--- a/src/datafactory/azext_datafactory/azext_metadata.json
+++ b/src/datafactory/azext_datafactory/azext_metadata.json
@@ -1,4 +1,4 @@
 {
     "azext.isExperimental": true,
-    "azext.minCliCoreVersion": "2.11.0"
+    "azext.minCliCoreVersion": "2.15.0"
 }
\ No newline at end of file
diff --git a/src/datafactory/azext_datafactory/generated/_client_factory.py b/src/datafactory/azext_datafactory/generated/_client_factory.py
index 69459245c8d..7db87b484da 100644
--- a/src/datafactory/azext_datafactory/generated/_client_factory.py
+++ b/src/datafactory/azext_datafactory/generated/_client_factory.py
@@ -11,46 +11,46 @@
 def cf_datafactory_cl(cli_ctx, *_):
     from azure.cli.core.commands.client_factory import get_mgmt_service_client
-    from ..vendored_sdks.datafactory import DataFactoryManagementClient
+    from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient
     return get_mgmt_service_client(cli_ctx, DataFactoryManagementClient)
 
 
 def cf_factory(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).factory
+    return cf_datafactory_cl(cli_ctx).factories
 
 
 def cf_integration_runtime(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).integration_runtime
+    return cf_datafactory_cl(cli_ctx).integration_runtimes
 
 
 def cf_integration_runtime_node(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).integration_runtime_node
+    return cf_datafactory_cl(cli_ctx).integration_runtime_nodes
 
 
 def cf_linked_service(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).linked_service
+    return cf_datafactory_cl(cli_ctx).linked_services
 
 
 def cf_dataset(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).dataset
+    return cf_datafactory_cl(cli_ctx).datasets
 
 
 def cf_pipeline(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).pipeline
+    return cf_datafactory_cl(cli_ctx).pipelines
 
 
 def cf_pipeline_run(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).pipeline_run
+    return cf_datafactory_cl(cli_ctx).pipeline_runs
 
 
 def cf_activity_run(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).activity_run
+    return cf_datafactory_cl(cli_ctx).activity_runs
 
 
 def cf_trigger(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).trigger
+    return cf_datafactory_cl(cli_ctx).triggers
 
 
 def cf_trigger_run(cli_ctx, *_):
-    return cf_datafactory_cl(cli_ctx).trigger_run
+    return cf_datafactory_cl(cli_ctx).trigger_runs
diff --git a/src/datafactory/azext_datafactory/generated/_help.py b/src/datafactory/azext_datafactory/generated/_help.py
index f42e472af34..3aa42c86b17 100644
--- a/src/datafactory/azext_datafactory/generated/_help.py
+++ b/src/datafactory/azext_datafactory/generated/_help.py
@@ -12,32 +12,35 @@
 from knack.help_files import helps
 
-helps['datafactory factory'] = """
+helps['datafactory'] = """
     type: group
-    short-summary: datafactory factory
+    short-summary: Manage factory with datafactory
 """
 
-helps['datafactory factory list'] = """
+helps['datafactory list'] = """
     type: command
-    short-summary: "Lists factories under the specified subscription."
+    short-summary: "Lists factories. And Lists factories under the specified subscription."
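For orientation, every client-factory helper in the _client_factory.py hunk above follows the same two-step pattern, shown here as a standalone sketch (illustrative only; the pluralized attribute names such as factories and trigger_runs are taken from the diff and presumably come from the regenerated vendored SDK):

from azure.cli.core.commands.client_factory import get_mgmt_service_client


def cf_datafactory_cl(cli_ctx, *_):
    # Build the vendored DataFactoryManagementClient from the active CLI credentials.
    from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient
    return get_mgmt_service_client(cli_ctx, DataFactoryManagementClient)


def cf_trigger_run(cli_ctx, *_):
    # Hand back the operation group a command group binds to; the newer SDK exposes
    # pluralized groups (factories, pipelines, trigger_runs, ...), hence the renames above.
    return cf_datafactory_cl(cli_ctx).trigger_runs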
examples: - name: Factories_ListByResourceGroup text: |- - az datafactory factory list --resource-group "exampleResourceGroup" + az datafactory list --resource-group "exampleResourceGroup" + - name: Factories_List + text: |- + az datafactory list """ -helps['datafactory factory show'] = """ +helps['datafactory show'] = """ type: command short-summary: "Gets a factory." examples: - name: Factories_Get text: |- - az datafactory factory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" + az datafactory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" """ -helps['datafactory factory create'] = """ +helps['datafactory create'] = """ type: command - short-summary: "Creates or updates a factory." + short-summary: "Create a factory." parameters: - name: --factory-vsts-configuration short-summary: "Factory's VSTS repo information." @@ -66,33 +69,41 @@ collaboration-branch: Required. Collaboration branch. root-folder: Required. Root folder. last-commit-id: Last commit id. + - name: --identity + short-summary: "User assigned identity to use to authenticate to customer's key vault. If not provided Managed \ +Service Identity will be used." + long-summary: | + Usage: --identity user-assigned-identity=XX + + user-assigned-identity: The resource id of the user assigned identity to authenticate to customer's key \ +vault. examples: - name: Factories_CreateOrUpdate text: |- - az datafactory factory create --location "East US" --name "exampleFactoryName" --resource-group \ + az datafactory create --location "East US" --name "exampleFactoryName" --resource-group \ "exampleResourceGroup" """ -helps['datafactory factory update'] = """ +helps['datafactory update'] = """ type: command short-summary: "Updates a factory." examples: - name: Factories_Update text: |- - az datafactory factory update --name "exampleFactoryName" --tags exampleTag="exampleValue" \ ---resource-group "exampleResourceGroup" + az datafactory update --name "exampleFactoryName" --tags exampleTag="exampleValue" --resource-group \ +"exampleResourceGroup" """ -helps['datafactory factory delete'] = """ +helps['datafactory delete'] = """ type: command short-summary: "Deletes a factory." examples: - name: Factories_Delete text: |- - az datafactory factory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" + az datafactory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" """ -helps['datafactory factory configure-factory-repo'] = """ +helps['datafactory configure-factory-repo'] = """ type: command short-summary: "Updates a factory's repo information." parameters: @@ -126,36 +137,36 @@ examples: - name: Factories_ConfigureFactoryRepo text: |- - az datafactory factory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1\ -234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ + az datafactory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-1234\ +5678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ --factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ repository-name="repo" root-folder="/" tenant-id="" --location "East US" """ -helps['datafactory factory get-data-plane-access'] = """ +helps['datafactory get-data-plane-access'] = """ type: command short-summary: "Get Data Plane access." 
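The key=value shorthands documented above are expanded client-side by the argparse actions added in generated/action.py further down in this patch. As a sketch, with values borrowed from the Factories_ConfigureFactoryRepo example and a purely hypothetical identity resource id, the expanded dictionaries look like this:

# --factory-vsts-configuration account-name="ADF" project-name="project" repository-name="repo" ...
factory_vsts_configuration = {
    'type': 'FactoryVSTSConfiguration',   # discriminator added by AddFactoryVstsConfiguration
    'account_name': 'ADF',
    'project_name': 'project',
    'repository_name': 'repo',
    'collaboration_branch': 'master',
    'root_folder': '/',
    'last_commit_id': '',
    'tenant_id': '',
}

# --identity user-assigned-identity=<resource id>   (placeholder id, not a real resource)
identity = {
    'user_assigned_identity': '/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/'
                              'exampleResourceGroup/providers/Microsoft.ManagedIdentity/'
                              'userAssignedIdentities/exampleIdentity',
}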
examples: - name: Factories_GetDataPlaneAccess text: |- - az datafactory factory get-data-plane-access --name "exampleFactoryName" --access-resource-path "" \ + az datafactory get-data-plane-access --name "exampleFactoryName" --access-resource-path "" \ --expire-time "2018-11-10T09:46:20.2659347Z" --permissions "r" --profile-name "DefaultProfile" --start-time \ "2018-11-10T02:46:20.2659347Z" --resource-group "exampleResourceGroup" """ -helps['datafactory factory get-git-hub-access-token'] = """ +helps['datafactory get-git-hub-access-token'] = """ type: command short-summary: "Get GitHub Access Token." examples: - name: Factories_GetGitHubAccessToken text: |- - az datafactory factory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code \ -"some" --git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" + az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code "some" \ +--git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime'] = """ type: group - short-summary: datafactory integration-runtime + short-summary: Manage integration runtime with datafactory """ helps['datafactory integration-runtime list'] = """ @@ -180,7 +191,7 @@ helps['datafactory integration-runtime linked-integration-runtime'] = """ type: group - short-summary: datafactory integration-runtime sub group linked-integration-runtime + short-summary: Manage integration runtime with datafactory sub group linked-integration-runtime """ helps['datafactory integration-runtime linked-integration-runtime create'] = """ @@ -192,28 +203,34 @@ az datafactory integration-runtime linked-integration-runtime create --name \ "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" --location "West US" --data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" \ --subscription-id "061774c7-4b5a-4159-a55b-365581830283" --factory-name "exampleFactoryName" \ ---integration-runtime-name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" --subscription-id \ -"12345678-1234-1234-1234-12345678abc" +--integration-runtime-name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" """ helps['datafactory integration-runtime managed'] = """ type: group - short-summary: datafactory integration-runtime sub group managed + short-summary: Manage integration runtime with datafactory sub group managed """ helps['datafactory integration-runtime managed create'] = """ type: command - short-summary: "Creates or updates an integration runtime." + short-summary: "Create an integration runtime." + parameters: + - name: --managed-virtual-network + short-summary: "Managed Virtual Network reference." + long-summary: | + Usage: --managed-virtual-network reference-name=XX + + reference-name: Required. Reference ManagedVirtualNetwork name. """ helps['datafactory integration-runtime self-hosted'] = """ type: group - short-summary: datafactory integration-runtime sub group self-hosted + short-summary: Manage integration runtime with datafactory sub group self-hosted """ helps['datafactory integration-runtime self-hosted create'] = """ type: command - short-summary: "Creates or updates an integration runtime." + short-summary: "Create an integration runtime." 
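The new --managed-virtual-network shorthand and the renamed type-properties arguments are assembled into a single request envelope by the custom.py changes later in this patch; roughly, the managed integration-runtime create command now builds a body of this shape (description and reference name are assumed sample values):

integration_runtime = {
    'properties': {
        'type': 'Managed',
        'description': 'example managed integration runtime',    # assumed sample value
        'managed_virtual_network': {
            'type': 'ManagedVirtualNetworkReference',             # fixed by AddManagedVirtualNetwork
            'reference_name': 'exampleManagedVirtualNetwork',     # assumed sample value
        },
        'compute_properties': None,   # optional JSON via the renamed compute_properties argument
        'ssis_properties': None,      # optional JSON via the renamed ssis_properties argument
    }
}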
examples: - name: IntegrationRuntimes_Create text: |- @@ -361,7 +378,7 @@ helps['datafactory integration-runtime-node'] = """ type: group - short-summary: datafactory integration-runtime-node + short-summary: Manage integration runtime node with datafactory """ helps['datafactory integration-runtime-node show'] = """ @@ -407,7 +424,7 @@ helps['datafactory linked-service'] = """ type: group - short-summary: datafactory linked-service + short-summary: Manage linked service with datafactory """ helps['datafactory linked-service list'] = """ @@ -432,7 +449,7 @@ helps['datafactory linked-service create'] = """ type: command - short-summary: "Creates or updates a linked service." + short-summary: "Create a linked service." examples: - name: LinkedServices_Create text: |- @@ -444,7 +461,7 @@ helps['datafactory linked-service update'] = """ type: command - short-summary: "Creates or updates a linked service." + short-summary: "Update a linked service." examples: - name: LinkedServices_Update text: |- @@ -464,7 +481,7 @@ helps['datafactory dataset'] = """ type: group - short-summary: datafactory dataset + short-summary: Manage dataset with datafactory """ helps['datafactory dataset list'] = """ @@ -488,7 +505,7 @@ helps['datafactory dataset create'] = """ type: command - short-summary: "Creates or updates a dataset." + short-summary: "Create a dataset." examples: - name: Datasets_Create text: |- @@ -502,7 +519,7 @@ helps['datafactory dataset update'] = """ type: command - short-summary: "Creates or updates a dataset." + short-summary: "Update a dataset." parameters: - name: --folder short-summary: "The folder that this Dataset is in. If not specified, Dataset will appear at the root level." @@ -531,7 +548,7 @@ helps['datafactory pipeline'] = """ type: group - short-summary: datafactory pipeline + short-summary: Manage pipeline with datafactory """ helps['datafactory pipeline list'] = """ @@ -556,7 +573,7 @@ helps['datafactory pipeline create'] = """ type: command - short-summary: "Creates or updates a pipeline." + short-summary: "Create a pipeline." examples: - name: Pipelines_Create text: |- @@ -570,13 +587,13 @@ :{\\"type\\":\\"BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipe\ line().parameters.OutputBlobNameList\\"}}}],\\"parameters\\":{\\"JobId\\":{\\"type\\":\\"String\\"},\\"OutputBlobNameLi\ st\\":{\\"type\\":\\"Array\\"}},\\"variables\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":\ -{\\"JobId\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.JobId\\"}}}" --name "examplePipeline" \ ---resource-group "exampleResourceGroup" +{\\"JobId\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.JobId\\"}},\\"duration\\":\\"0.00:10:00\ +\\"}" --name "examplePipeline" --resource-group "exampleResourceGroup" """ helps['datafactory pipeline update'] = """ type: command - short-summary: "Creates or updates a pipeline." + short-summary: "Update a pipeline." 
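The pipeline create/update examples below use the new --duration argument; per the _params.py and custom.py changes later in this patch it populates the pipeline's elapsed-time-metric policy (instance.elapsed_time_metric.duration). The exact REST nesting is inferred from the arg group name, so treat this fragment as illustrative:

pipeline_policy = {
    'elapsedTimeMetric': {
        'duration': '0.00:10:00',   # TimeSpan after which an Azure Monitoring metric is fired
    }
}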
examples: - name: Pipelines_Update text: |- @@ -588,8 +605,8 @@ \\"Expression\\",\\"value\\":\\"@item()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDa\ taset\\"}],\\"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"\ type\\":\\"BlobSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline(\ -).parameters.OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --name \ -"examplePipeline" --resource-group "exampleResourceGroup" +).parameters.OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration \ +"0.00:10:00" --pipeline-name "examplePipeline" --resource-group "exampleResourceGroup" """ helps['datafactory pipeline delete'] = """ @@ -615,7 +632,7 @@ helps['datafactory pipeline-run'] = """ type: group - short-summary: datafactory pipeline-run + short-summary: Manage pipeline run with datafactory """ helps['datafactory pipeline-run show'] = """ @@ -675,7 +692,7 @@ helps['datafactory activity-run'] = """ type: group - short-summary: datafactory activity-run + short-summary: Manage activity run with datafactory """ helps['datafactory activity-run query-by-pipeline-run'] = """ @@ -715,7 +732,7 @@ helps['datafactory trigger'] = """ type: group - short-summary: datafactory trigger + short-summary: Manage trigger with datafactory """ helps['datafactory trigger list'] = """ @@ -739,7 +756,7 @@ helps['datafactory trigger create'] = """ type: command - short-summary: "Creates or updates a trigger." + short-summary: "Create a trigger." examples: - name: Triggers_Create text: |- @@ -753,7 +770,7 @@ helps['datafactory trigger update'] = """ type: command - short-summary: "Creates or updates a trigger." + short-summary: "Update a trigger." examples: - name: Triggers_Update text: |- @@ -843,7 +860,7 @@ helps['datafactory trigger-run'] = """ type: group - short-summary: datafactory trigger-run + short-summary: Manage trigger run with datafactory """ helps['datafactory trigger-run cancel'] = """ diff --git a/src/datafactory/azext_datafactory/generated/_params.py b/src/datafactory/azext_datafactory/generated/_params.py index 85dcca4f8b3..503c4b9ef80 100644 --- a/src/datafactory/azext_datafactory/generated/_params.py +++ b/src/datafactory/azext_datafactory/generated/_params.py @@ -24,6 +24,8 @@ from azext_datafactory.action import ( AddFactoryVstsConfiguration, AddFactoryGitHubConfiguration, + AddIdentity, + AddManagedVirtualNetwork, AddFolder, AddFilters, AddOrderBy @@ -32,52 +34,67 @@ def load_arguments(self, _): - with self.argument_context('datafactory factory list') as c: + with self.argument_context('datafactory list') as c: c.argument('resource_group_name', resource_group_name_type) - with self.argument_context('datafactory factory show') as c: + with self.argument_context('datafactory show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') c.argument('if_none_match', type=str, help='ETag of the factory entity. Should only be specified for get. 
If ' 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') - with self.argument_context('datafactory factory create') as c: + with self.argument_context('datafactory create') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.') c.argument('if_match', type=str, help='ETag of the factory entity. Should only be specified for update, for ' 'which it should match existing entity or can be * for unconditional update.') - c.argument('location', arg_type=get_location_type(self.cli_ctx), + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) c.argument('tags', tags_type) - c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='*', help='Factory\'s VSTS ' + c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS ' 'repo information.', arg_group='RepoConfiguration') - c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='*', help='Factory\'s ' + c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s ' 'GitHub repo information.', arg_group='RepoConfiguration') c.argument('global_parameters', type=validate_file_or_dict, help='List of parameters for factory. Expected ' 'value: json-string/@json-file.') - - with self.argument_context('datafactory factory update') as c: + c.argument('public_network_access', arg_type=get_enum_type(['Enabled', 'Disabled']), help='Whether or not ' + 'public network access is allowed for the data factory.') + c.argument('key_name', type=str, help='The name of the key in Azure Key Vault to use as Customer Managed Key.', + arg_group='Encryption') + c.argument('vault_base_url', type=str, help='The url of the Azure Key Vault used for CMK.', + arg_group='Encryption') + c.argument('key_version', type=str, help='The version of the key used for CMK. If not provided, latest version ' + 'will be used.', arg_group='Encryption') + c.argument('identity', action=AddIdentity, nargs='+', help='User assigned identity to use to authenticate to ' + 'customer\'s key vault. If not provided Managed Service Identity will be used.', + arg_group='Encryption') + c.argument('user_assigned_identities', type=validate_file_or_dict, help='List of user assigned identities for ' + 'the factory. Expected value: json-string/@json-file.', arg_group='Identity') + + with self.argument_context('datafactory update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') c.argument('tags', tags_type) + c.argument('user_assigned_identities', type=validate_file_or_dict, help='List of user assigned identities for ' + 'the factory. 
Expected value: json-string/@json-file.', arg_group='Identity') - with self.argument_context('datafactory factory delete') as c: + with self.argument_context('datafactory delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') - with self.argument_context('datafactory factory configure-factory-repo') as c: + with self.argument_context('datafactory configure-factory-repo') as c: c.argument('location', arg_type=get_location_type(self.cli_ctx), id_part='name') c.argument('factory_resource_id', type=str, help='The factory resource id.') - c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='*', help='Factory\'s VSTS ' + c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS ' 'repo information.', arg_group='RepoConfiguration') - c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='*', help='Factory\'s ' + c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s ' 'GitHub repo information.', arg_group='RepoConfiguration') - with self.argument_context('datafactory factory get-data-plane-access') as c: + with self.argument_context('datafactory get-data-plane-access') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') @@ -92,7 +109,7 @@ def load_arguments(self, _): c.argument('expire_time', type=str, help='Expiration time for the token. Maximum duration for the token is ' 'eight hours and by default the token will expire in eight hours.') - with self.argument_context('datafactory factory get-git-hub-access-token') as c: + with self.argument_context('datafactory get-git-hub-access-token') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.', id_part='name') @@ -122,7 +139,7 @@ def load_arguments(self, _): 'belongs to.') c.argument('data_factory_name', type=str, help='The name of the data factory that the linked integration ' 'runtime belongs to.') - c.argument('location', arg_type=get_location_type(self.cli_ctx), + c.argument('location', arg_type=get_location_type(self.cli_ctx), required=False, validator=get_default_location_from_resource_group) with self.argument_context('datafactory integration-runtime managed create') as c: @@ -133,10 +150,12 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for ' 'update, for which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='Integration runtime description.') - c.argument('type_properties_compute_properties', type=validate_file_or_dict, help='The compute resource for ' - 'managed integration runtime. Expected value: json-string/@json-file.') - c.argument('type_properties_ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed ' - 'integration runtime. 
Expected value: json-string/@json-file.') + c.argument('managed_virtual_network', action=AddManagedVirtualNetwork, nargs='+', help='Managed Virtual ' + 'Network reference.') + c.argument('compute_properties', type=validate_file_or_dict, help='The compute resource for managed ' + 'integration runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') + c.argument('ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed integration ' + 'runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') with self.argument_context('datafactory integration-runtime self-hosted create') as c: c.argument('resource_group_name', resource_group_name_type) @@ -146,8 +165,8 @@ def load_arguments(self, _): c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for ' 'update, for which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='Integration runtime description.') - c.argument('type_properties_linked_info', type=validate_file_or_dict, help='The base definition of a linked ' - 'integration runtime. Expected value: json-string/@json-file.') + c.argument('linked_info', type=validate_file_or_dict, help='The base definition of a linked integration ' + 'runtime. Expected value: json-string/@json-file.', arg_group='Type Properties') with self.argument_context('datafactory integration-runtime update') as c: c.argument('resource_group_name', resource_group_name_type) @@ -301,7 +320,7 @@ def load_arguments(self, _): 'json-string/@json-file.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' 'linked service. Expected value: json-string/@json-file.') - c.ignore('properties') + c.ignore('linked_service') with self.argument_context('datafactory linked-service delete') as c: c.argument('resource_group_name', resource_group_name_type) @@ -351,9 +370,9 @@ def load_arguments(self, _): 'json-string/@json-file.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' 'Dataset. Expected value: json-string/@json-file.') - c.argument('folder', action=AddFolder, nargs='*', help='The folder that this Dataset is in. If not specified, ' + c.argument('folder', action=AddFolder, nargs='+', help='The folder that this Dataset is in. If not specified, ' 'Dataset will appear at the root level.') - c.ignore('properties') + c.ignore('dataset') with self.argument_context('datafactory dataset delete') as c: c.argument('resource_group_name', resource_group_name_type) @@ -386,8 +405,7 @@ def load_arguments(self, _): with self.argument_context('datafactory pipeline update') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('factory_name', type=str, help='The factory name.', id_part='name') - c.argument('pipeline_name', options_list=['--name', '-n', '--pipeline-name'], type=str, help='The pipeline ' - 'name.', id_part='child_name_1') + c.argument('pipeline_name', type=str, help='The pipeline name.', id_part='child_name_1') c.argument('if_match', type=str, help='ETag of the pipeline entity. Should only be specified for update, for ' 'which it should match existing entity or can be * for unconditional update.') c.argument('description', type=str, help='The description of the pipeline.') @@ -402,7 +420,9 @@ def load_arguments(self, _): 'Pipeline. 
Expected value: json-string/@json-file.') c.argument('run_dimensions', type=validate_file_or_dict, help='Dimensions emitted by Pipeline. Expected value: ' 'json-string/@json-file.') - c.argument('folder_name', type=str, help='The name of the folder that this Pipeline is in.') + c.argument('duration', type=validate_file_or_dict, help='TimeSpan value, after which an Azure Monitoring ' + 'Metric is fired. Expected value: json-string/@json-file.', arg_group='Policy Elapsed Time Metric') + c.argument('name', type=str, help='The name of the folder that this Pipeline is in.', arg_group='Folder') c.ignore('pipeline') with self.argument_context('datafactory pipeline delete') as c: @@ -450,8 +470,8 @@ def load_arguments(self, _): 'format.') c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' 'format.') - c.argument('filters', action=AddFilters, nargs='*', help='List of filters.') - c.argument('order_by', action=AddOrderBy, nargs='*', help='List of OrderBy option.') + c.argument('filters', action=AddFilters, nargs='+', help='List of filters.') + c.argument('order_by', action=AddOrderBy, nargs='+', help='List of OrderBy option.') with self.argument_context('datafactory activity-run query-by-pipeline-run') as c: c.argument('resource_group_name', resource_group_name_type) @@ -463,8 +483,8 @@ def load_arguments(self, _): 'format.') c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' 'format.') - c.argument('filters', action=AddFilters, nargs='*', help='List of filters.') - c.argument('order_by', action=AddOrderBy, nargs='*', help='List of OrderBy option.') + c.argument('filters', action=AddFilters, nargs='+', help='List of filters.') + c.argument('order_by', action=AddOrderBy, nargs='+', help='List of OrderBy option.') with self.argument_context('datafactory trigger list') as c: c.argument('resource_group_name', resource_group_name_type) @@ -498,7 +518,7 @@ def load_arguments(self, _): c.argument('description', type=str, help='Trigger description.') c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the ' 'trigger. 
Expected value: json-string/@json-file.') - c.ignore('properties') + c.ignore('trigger') with self.argument_context('datafactory trigger delete') as c: c.argument('resource_group_name', resource_group_name_type) @@ -567,8 +587,8 @@ def load_arguments(self, _): 'format.') c.argument('last_updated_before', help='The time at or before which the run event was updated in \'ISO 8601\' ' 'format.') - c.argument('filters', action=AddFilters, nargs='*', help='List of filters.') - c.argument('order_by', action=AddOrderBy, nargs='*', help='List of OrderBy option.') + c.argument('filters', action=AddFilters, nargs='+', help='List of filters.') + c.argument('order_by', action=AddOrderBy, nargs='+', help='List of OrderBy option.') with self.argument_context('datafactory trigger-run rerun') as c: c.argument('resource_group_name', resource_group_name_type) diff --git a/src/datafactory/azext_datafactory/generated/action.py b/src/datafactory/azext_datafactory/generated/action.py index ec9616c8672..d593a31beb8 100644 --- a/src/datafactory/azext_datafactory/generated/action.py +++ b/src/datafactory/azext_datafactory/generated/action.py @@ -45,6 +45,10 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['root_folder'] = v[0] elif kl == 'last-commit-id': d['last_commit_id'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter factory_vsts_configuration. All possible ' + 'keys are: project-name, tenant-id, account-name, repository-name, ' + 'collaboration-branch, root-folder, last-commit-id'.format(k)) d['type'] = 'FactoryVSTSConfiguration' return d @@ -78,10 +82,65 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['root_folder'] = v[0] elif kl == 'last-commit-id': d['last_commit_id'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter factory_git_hub_configuration. All ' + 'possible keys are: host-name, account-name, repository-name, collaboration-branch, ' + 'root-folder, last-commit-id'.format(k)) d['type'] = 'FactoryGitHubConfiguration' return d +class AddIdentity(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.identity = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'user-assigned-identity': + d['user_assigned_identity'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter identity. 
All possible keys are: ' + 'user-assigned-identity'.format(k)) + return d + + +class AddManagedVirtualNetwork(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + action = self.get_action(values, option_string) + namespace.managed_virtual_network = action + + def get_action(self, values, option_string): # pylint: disable=no-self-use + try: + properties = defaultdict(list) + for (k, v) in (x.split('=', 1) for x in values): + properties[k].append(v) + properties = dict(properties) + except ValueError: + raise CLIError('usage error: {} [KEY=VALUE ...]'.format(option_string)) + d = {} + d['type'] = "ManagedVirtualNetworkReference" + for k in properties: + kl = k.lower() + v = properties[k] + if kl == 'reference-name': + d['reference_name'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter managed_virtual_network. All possible ' + 'keys are: reference-name'.format(k)) + return d + + class AddFolder(argparse.Action): def __call__(self, parser, namespace, values, option_string=None): action = self.get_action(values, option_string) @@ -101,6 +160,9 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use v = properties[k] if kl == 'name': d['name'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter folder. All possible keys are: name'. + format(k)) return d @@ -127,6 +189,9 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['operator'] = v[0] elif kl == 'values': d['values'] = v + else: + raise CLIError('Unsupported Key {} is provided for parameter filters. All possible keys are: operand, ' + 'operator, values'.format(k)) return d @@ -151,4 +216,7 @@ def get_action(self, values, option_string): # pylint: disable=no-self-use d['order_by'] = v[0] elif kl == 'order': d['order'] = v[0] + else: + raise CLIError('Unsupported Key {} is provided for parameter order_by. 
All possible keys are: ' + 'order-by, order'.format(k)) return d diff --git a/src/datafactory/azext_datafactory/generated/commands.py b/src/datafactory/azext_datafactory/generated/commands.py index cfc2a3de83b..df59d171a88 100644 --- a/src/datafactory/azext_datafactory/generated/commands.py +++ b/src/datafactory/azext_datafactory/generated/commands.py @@ -17,27 +17,26 @@ def load_command_table(self, _): from azext_datafactory.generated._client_factory import cf_factory datafactory_factory = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._factory_operations#FactoryOperations.{' - '}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._factories_operations#FactoriesOperatio' + 'ns.{}', client_factory=cf_factory) - with self.command_group('datafactory factory', datafactory_factory, client_factory=cf_factory, - is_experimental=True) as g: - g.custom_command('list', 'datafactory_factory_list') - g.custom_show_command('show', 'datafactory_factory_show') - g.custom_command('create', 'datafactory_factory_create') - g.custom_command('update', 'datafactory_factory_update') - g.custom_command('delete', 'datafactory_factory_delete', confirmation=True) - g.custom_command('configure-factory-repo', 'datafactory_factory_configure_factory_repo') - g.custom_command('get-data-plane-access', 'datafactory_factory_get_data_plane_access') - g.custom_command('get-git-hub-access-token', 'datafactory_factory_get_git_hub_access_token') + with self.command_group('datafactory', datafactory_factory, client_factory=cf_factory, is_experimental=True) as g: + g.custom_command('list', 'datafactory_list') + g.custom_show_command('show', 'datafactory_show') + g.custom_command('create', 'datafactory_create') + g.custom_command('update', 'datafactory_update') + g.custom_command('delete', 'datafactory_delete', confirmation=True) + g.custom_command('configure-factory-repo', 'datafactory_configure_factory_repo') + g.custom_command('get-data-plane-access', 'datafactory_get_data_plane_access') + g.custom_command('get-git-hub-access-token', 'datafactory_get_git_hub_access_token') from azext_datafactory.generated._client_factory import cf_integration_runtime datafactory_integration_runtime = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_operations#Integra' - 'tionRuntimeOperations.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtimes_operations#Integr' + 'ationRuntimesOperations.{}', client_factory=cf_integration_runtime) with self.command_group('datafactory integration-runtime', datafactory_integration_runtime, - client_factory=cf_integration_runtime, is_experimental=True) as g: + client_factory=cf_integration_runtime) as g: g.custom_command('list', 'datafactory_integration_runtime_list') g.custom_show_command('show', 'datafactory_integration_runtime_show') g.custom_command('linked-integration-runtime create', 'datafactory_integration_runtime_linked_integration_runti' @@ -60,11 +59,11 @@ def load_command_table(self, _): from azext_datafactory.generated._client_factory import cf_integration_runtime_node datafactory_integration_runtime_node = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_node_operations#In' - 'tegrationRuntimeNodeOperations.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_nodes_operations#I' + 
'ntegrationRuntimeNodesOperations.{}', client_factory=cf_integration_runtime_node) with self.command_group('datafactory integration-runtime-node', datafactory_integration_runtime_node, - client_factory=cf_integration_runtime_node, is_experimental=True) as g: + client_factory=cf_integration_runtime_node) as g: g.custom_show_command('show', 'datafactory_integration_runtime_node_show') g.custom_command('update', 'datafactory_integration_runtime_node_update') g.custom_command('delete', 'datafactory_integration_runtime_node_delete', confirmation=True) @@ -72,39 +71,36 @@ def load_command_table(self, _): from azext_datafactory.generated._client_factory import cf_linked_service datafactory_linked_service = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._linked_service_operations#LinkedServic' - 'eOperations.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._linked_services_operations#LinkedServi' + 'cesOperations.{}', client_factory=cf_linked_service) - with self.command_group('datafactory linked-service', datafactory_linked_service, client_factory=cf_linked_service, - is_experimental=True) as g: + with self.command_group('datafactory linked-service', datafactory_linked_service, + client_factory=cf_linked_service) as g: g.custom_command('list', 'datafactory_linked_service_list') g.custom_show_command('show', 'datafactory_linked_service_show') g.custom_command('create', 'datafactory_linked_service_create') - g.generic_update_command('update', setter_arg_name='properties', custom_func_name='' - 'datafactory_linked_service_update') + g.generic_update_command('update', setter_arg_name='linked_service', + custom_func_name='datafactory_linked_service_update') g.custom_command('delete', 'datafactory_linked_service_delete', confirmation=True) from azext_datafactory.generated._client_factory import cf_dataset datafactory_dataset = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._dataset_operations#DatasetOperations.{' - '}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._datasets_operations#DatasetsOperations' + '.{}', client_factory=cf_dataset) - with self.command_group('datafactory dataset', datafactory_dataset, client_factory=cf_dataset, - is_experimental=True) as g: + with self.command_group('datafactory dataset', datafactory_dataset, client_factory=cf_dataset) as g: g.custom_command('list', 'datafactory_dataset_list') g.custom_show_command('show', 'datafactory_dataset_show') g.custom_command('create', 'datafactory_dataset_create') - g.generic_update_command('update', setter_arg_name='properties', - custom_func_name='datafactory_dataset_update') + g.generic_update_command('update', setter_arg_name='dataset', custom_func_name='datafactory_dataset_update') g.custom_command('delete', 'datafactory_dataset_delete', confirmation=True) from azext_datafactory.generated._client_factory import cf_pipeline datafactory_pipeline = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._pipeline_operations#PipelineOperations' - '.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._pipelines_operations#PipelinesOperatio' + 'ns.{}', client_factory=cf_pipeline) - with self.command_group('datafactory pipeline', datafactory_pipeline, client_factory=cf_pipeline, - is_experimental=True) as g: + with self.command_group('datafactory pipeline', datafactory_pipeline, client_factory=cf_pipeline) as g: g.custom_command('list', 
'datafactory_pipeline_list') g.custom_show_command('show', 'datafactory_pipeline_show') g.custom_command('create', 'datafactory_pipeline_create') @@ -114,36 +110,34 @@ def load_command_table(self, _): from azext_datafactory.generated._client_factory import cf_pipeline_run datafactory_pipeline_run = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._pipeline_run_operations#PipelineRunOpe' - 'rations.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._pipeline_runs_operations#PipelineRunsO' + 'perations.{}', client_factory=cf_pipeline_run) - with self.command_group('datafactory pipeline-run', datafactory_pipeline_run, client_factory=cf_pipeline_run, - is_experimental=True) as g: + with self.command_group('datafactory pipeline-run', datafactory_pipeline_run, + client_factory=cf_pipeline_run) as g: g.custom_show_command('show', 'datafactory_pipeline_run_show') g.custom_command('cancel', 'datafactory_pipeline_run_cancel') g.custom_command('query-by-factory', 'datafactory_pipeline_run_query_by_factory') from azext_datafactory.generated._client_factory import cf_activity_run datafactory_activity_run = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._activity_run_operations#ActivityRunOpe' - 'rations.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._activity_runs_operations#ActivityRunsO' + 'perations.{}', client_factory=cf_activity_run) - with self.command_group('datafactory activity-run', datafactory_activity_run, client_factory=cf_activity_run, - is_experimental=True) as g: + with self.command_group('datafactory activity-run', datafactory_activity_run, + client_factory=cf_activity_run) as g: g.custom_command('query-by-pipeline-run', 'datafactory_activity_run_query_by_pipeline_run') from azext_datafactory.generated._client_factory import cf_trigger datafactory_trigger = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._trigger_operations#TriggerOperations.{' - '}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._triggers_operations#TriggersOperations' + '.{}', client_factory=cf_trigger) - with self.command_group('datafactory trigger', datafactory_trigger, client_factory=cf_trigger, - is_experimental=True) as g: + with self.command_group('datafactory trigger', datafactory_trigger, client_factory=cf_trigger) as g: g.custom_command('list', 'datafactory_trigger_list') g.custom_show_command('show', 'datafactory_trigger_show') g.custom_command('create', 'datafactory_trigger_create') - g.generic_update_command('update', setter_arg_name='properties', - custom_func_name='datafactory_trigger_update') + g.generic_update_command('update', setter_arg_name='trigger', custom_func_name='datafactory_trigger_update') g.custom_command('delete', 'datafactory_trigger_delete', confirmation=True) g.custom_command('get-event-subscription-status', 'datafactory_trigger_get_event_subscription_status') g.custom_command('query-by-factory', 'datafactory_trigger_query_by_factory') @@ -156,11 +150,10 @@ def load_command_table(self, _): from azext_datafactory.generated._client_factory import cf_trigger_run datafactory_trigger_run = CliCommandType( - operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._trigger_run_operations#TriggerRunOpera' - 'tions.{}', + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._trigger_runs_operations#TriggerRunsOpe' + 'rations.{}', 
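The setter_arg_name changes in this file pair with the custom update functions in custom.py, which now return the whole resource envelope rather than instance.properties. A trimmed illustration of that flow, reduced to a single parameter:

def datafactory_linked_service_update(instance, description=None):
    # generic_update_command GETs the resource, passes it in as `instance`, and PUTs whatever
    # is returned via create_or_update(..., linked_service=<return value>).
    if description is not None:
        instance.properties.description = description
    return instance   # return the full envelope, not instance.properties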
client_factory=cf_trigger_run) - with self.command_group('datafactory trigger-run', datafactory_trigger_run, client_factory=cf_trigger_run, - is_experimental=True) as g: + with self.command_group('datafactory trigger-run', datafactory_trigger_run, client_factory=cf_trigger_run) as g: g.custom_command('cancel', 'datafactory_trigger_run_cancel') g.custom_command('query-by-factory', 'datafactory_trigger_run_query_by_factory') g.custom_command('rerun', 'datafactory_trigger_run_rerun') diff --git a/src/datafactory/azext_datafactory/generated/custom.py b/src/datafactory/azext_datafactory/generated/custom.py index 513e21ca96d..b67d8124d65 100644 --- a/src/datafactory/azext_datafactory/generated/custom.py +++ b/src/datafactory/azext_datafactory/generated/custom.py @@ -7,39 +7,45 @@ # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- +# pylint: disable=line-too-long # pylint: disable=too-many-lines # pylint: disable=unused-argument -import json from knack.util import CLIError from azure.cli.core.util import sdk_no_wait -def datafactory_factory_list(client, - resource_group_name=None): +def datafactory_list(client, + resource_group_name=None): if resource_group_name: return client.list_by_resource_group(resource_group_name=resource_group_name) return client.list() -def datafactory_factory_show(client, - resource_group_name, - factory_name, - if_none_match=None): +def datafactory_show(client, + resource_group_name, + factory_name, + if_none_match=None): return client.get(resource_group_name=resource_group_name, factory_name=factory_name, if_none_match=if_none_match) -def datafactory_factory_create(client, - resource_group_name, - factory_name, - if_match=None, - location=None, - tags=None, - factory_vsts_configuration=None, - factory_git_hub_configuration=None, - global_parameters=None): +def datafactory_create(client, + resource_group_name, + factory_name, + if_match=None, + location=None, + tags=None, + factory_vsts_configuration=None, + factory_git_hub_configuration=None, + global_parameters=None, + public_network_access=None, + key_name=None, + vault_base_url=None, + key_version=None, + identity=None, + user_assigned_identities=None): all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) @@ -49,38 +55,53 @@ def datafactory_factory_create(client, raise CLIError('at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for ' 'repo_configuration!') repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + factory = {} + factory['location'] = location + factory['tags'] = tags + factory['repo_configuration'] = repo_configuration + factory['global_parameters'] = global_parameters + factory['public_network_access'] = public_network_access + factory['encryption'] = {} + factory['encryption']['key_name'] = key_name + factory['encryption']['vault_base_url'] = vault_base_url + factory['encryption']['key_version'] = key_version + factory['encryption']['identity'] = identity + factory['identity'] = {} + factory['identity']['type'] = "SystemAssigned" + factory['identity']['user_assigned_identities'] = user_assigned_identities return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, if_match=if_match, - location=location, - tags=tags, - identity=None, - repo_configuration=repo_configuration, - 
global_parameters=global_parameters) - - -def datafactory_factory_update(client, - resource_group_name, - factory_name, - tags=None): + factory=factory) + + +def datafactory_update(client, + resource_group_name, + factory_name, + tags=None, + user_assigned_identities=None): + factory_update_parameters = {} + factory_update_parameters['tags'] = tags + factory_update_parameters['identity'] = {} + factory_update_parameters['identity']['type'] = "SystemAssigned" + factory_update_parameters['identity']['user_assigned_identities'] = user_assigned_identities return client.update(resource_group_name=resource_group_name, factory_name=factory_name, - tags=tags, - identity={"type": "SystemAssigned"}) + factory_update_parameters=factory_update_parameters) -def datafactory_factory_delete(client, - resource_group_name, - factory_name): +def datafactory_delete(client, + resource_group_name, + factory_name): return client.delete(resource_group_name=resource_group_name, factory_name=factory_name) -def datafactory_factory_configure_factory_repo(client, - location, - factory_resource_id=None, - factory_vsts_configuration=None, - factory_git_hub_configuration=None): +def datafactory_configure_factory_repo(client, + location, + factory_resource_id=None, + factory_vsts_configuration=None, + factory_git_hub_configuration=None): all_repo_configuration = [] if factory_vsts_configuration is not None: all_repo_configuration.append(factory_vsts_configuration) @@ -90,39 +111,45 @@ def datafactory_factory_configure_factory_repo(client, raise CLIError('at most one of factory_vsts_configuration, factory_git_hub_configuration is needed for ' 'repo_configuration!') repo_configuration = all_repo_configuration[0] if len(all_repo_configuration) == 1 else None + factory_repo_update = {} + factory_repo_update['factory_resource_id'] = factory_resource_id + factory_repo_update['repo_configuration'] = repo_configuration return client.configure_factory_repo(location_id=location, - factory_resource_id=factory_resource_id, - repo_configuration=repo_configuration) + factory_repo_update=factory_repo_update) -def datafactory_factory_get_data_plane_access(client, - resource_group_name, - factory_name, - permissions=None, - access_resource_path=None, - profile_name=None, - start_time=None, - expire_time=None): +def datafactory_get_data_plane_access(client, + resource_group_name, + factory_name, + permissions=None, + access_resource_path=None, + profile_name=None, + start_time=None, + expire_time=None): + policy = {} + policy['permissions'] = permissions + policy['access_resource_path'] = access_resource_path + policy['profile_name'] = profile_name + policy['start_time'] = start_time + policy['expire_time'] = expire_time return client.get_data_plane_access(resource_group_name=resource_group_name, factory_name=factory_name, - permissions=permissions, - access_resource_path=access_resource_path, - profile_name=profile_name, - start_time=start_time, - expire_time=expire_time) - - -def datafactory_factory_get_git_hub_access_token(client, - resource_group_name, - factory_name, - git_hub_access_code, - git_hub_access_token_base_url, - git_hub_client_id=None): + policy=policy) + + +def datafactory_get_git_hub_access_token(client, + resource_group_name, + factory_name, + git_hub_access_code, + git_hub_access_token_base_url, + git_hub_client_id=None): + git_hub_access_token_request = {} + git_hub_access_token_request['git_hub_access_code'] = git_hub_access_code + git_hub_access_token_request['git_hub_client_id'] = git_hub_client_id + 
git_hub_access_token_request['git_hub_access_token_base_url'] = git_hub_access_token_base_url return client.get_git_hub_access_token(resource_group_name=resource_group_name, factory_name=factory_name, - git_hub_access_code=git_hub_access_code, - git_hub_client_id=git_hub_client_id, - git_hub_access_token_base_url=git_hub_access_token_base_url) + git_hub_access_token_request=git_hub_access_token_request) def datafactory_integration_runtime_list(client, @@ -151,13 +178,15 @@ def datafactory_integration_runtime_linked_integration_runtime_create(client, subscription_id=None, data_factory_name=None, location=None): + create_linked_integration_runtime_request = {} + create_linked_integration_runtime_request['name'] = name + create_linked_integration_runtime_request['subscription_id'] = subscription_id + create_linked_integration_runtime_request['data_factory_name'] = data_factory_name + create_linked_integration_runtime_request['data_factory_location'] = location return client.create_linked_integration_runtime(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, - name=name, - subscription_id=subscription_id, - data_factory_name=data_factory_name, - data_factory_location=location) + create_linked_integration_runtime_request=create_linked_integration_runtime_request) def datafactory_integration_runtime_managed_create(client, @@ -166,18 +195,21 @@ def datafactory_integration_runtime_managed_create(client, integration_runtime_name, if_match=None, description=None, - type_properties_compute_properties=None, - type_properties_ssis_properties=None): - properties = {} - properties['type'] = 'Managed' - properties['description'] = description - properties['compute_properties'] = type_properties_compute_properties - properties['ssis_properties'] = type_properties_ssis_properties + managed_virtual_network=None, + compute_properties=None, + ssis_properties=None): + integration_runtime = {} + integration_runtime['properties'] = {} + integration_runtime['properties']['type'] = 'Managed' + integration_runtime['properties']['description'] = description + integration_runtime['properties']['managed_virtual_network'] = managed_virtual_network + integration_runtime['properties']['compute_properties'] = compute_properties + integration_runtime['properties']['ssis_properties'] = ssis_properties return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, if_match=if_match, - properties=properties) + integration_runtime=integration_runtime) def datafactory_integration_runtime_self_hosted_create(client, @@ -186,16 +218,17 @@ def datafactory_integration_runtime_self_hosted_create(client, integration_runtime_name, if_match=None, description=None, - type_properties_linked_info=None): - properties = {} - properties['type'] = 'SelfHosted' - properties['description'] = description - properties['linked_info'] = type_properties_linked_info + linked_info=None): + integration_runtime = {} + integration_runtime['properties'] = {} + integration_runtime['properties']['type'] = 'SelfHosted' + integration_runtime['properties']['description'] = description + integration_runtime['properties']['linked_info'] = linked_info return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, if_match=if_match, - properties=properties) + integration_runtime=integration_runtime) def 
datafactory_integration_runtime_update(client, @@ -204,11 +237,13 @@ def datafactory_integration_runtime_update(client, integration_runtime_name, auto_update=None, update_delay_offset=None): + update_integration_runtime_request = {} + update_integration_runtime_request['auto_update'] = auto_update + update_integration_runtime_request['update_delay_offset'] = update_delay_offset return client.update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, - auto_update=auto_update, - update_delay_offset=update_delay_offset) + update_integration_runtime_request=update_integration_runtime_request) def datafactory_integration_runtime_delete(client, @@ -251,9 +286,9 @@ def datafactory_integration_runtime_list_auth_key(client, resource_group_name, factory_name, integration_runtime_name): - return client.list_auth_key(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name) + return client.list_auth_keys(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) def datafactory_integration_runtime_regenerate_auth_key(client, @@ -261,10 +296,12 @@ def datafactory_integration_runtime_regenerate_auth_key(client, factory_name, integration_runtime_name, key_name=None): + regenerate_key_parameters = {} + regenerate_key_parameters['key_name'] = key_name return client.regenerate_auth_key(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, - key_name=key_name) + regenerate_key_parameters=regenerate_key_parameters) def datafactory_integration_runtime_remove_link(client, @@ -272,10 +309,12 @@ def datafactory_integration_runtime_remove_link(client, factory_name, integration_runtime_name, linked_factory_name): - return client.remove_link(resource_group_name=resource_group_name, - factory_name=factory_name, - integration_runtime_name=integration_runtime_name, - linked_factory_name=linked_factory_name) + linked_integration_runtime_request = {} + linked_integration_runtime_request['linked_factory_name'] = linked_factory_name + return client.remove_links(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + linked_integration_runtime_request=linked_integration_runtime_request) def datafactory_integration_runtime_start(client, @@ -337,11 +376,13 @@ def datafactory_integration_runtime_node_update(client, integration_runtime_name, node_name, concurrent_jobs_limit=None): + update_integration_runtime_node_request = {} + update_integration_runtime_node_request['concurrent_jobs_limit'] = concurrent_jobs_limit return client.update(resource_group_name=resource_group_name, factory_name=factory_name, integration_runtime_name=integration_runtime_name, node_name=node_name, - concurrent_jobs_limit=concurrent_jobs_limit) + update_integration_runtime_node_request=update_integration_runtime_node_request) def datafactory_integration_runtime_node_delete(client, @@ -390,11 +431,13 @@ def datafactory_linked_service_create(client, linked_service_name, properties, if_match=None): + linked_service = {} + linked_service['properties'] = properties return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, linked_service_name=linked_service_name, if_match=if_match, - properties=properties) + linked_service=linked_service) def datafactory_linked_service_update(instance, @@ 
-414,7 +457,7 @@ def datafactory_linked_service_update(instance, instance.properties.parameters = parameters if annotations is not None: instance.properties.annotations = annotations - return instance.properties + return instance def datafactory_linked_service_delete(client, @@ -450,11 +493,13 @@ def datafactory_dataset_create(client, dataset_name, properties, if_match=None): + dataset = {} + dataset['properties'] = properties return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, dataset_name=dataset_name, if_match=if_match, - properties=properties) + dataset=dataset) def datafactory_dataset_update(instance, @@ -483,7 +528,7 @@ def datafactory_dataset_update(instance, instance.properties.annotations = annotations if folder is not None: instance.properties.folder = folder - return instance.properties + return instance def datafactory_dataset_delete(client, @@ -538,7 +583,8 @@ def datafactory_pipeline_update(instance, concurrency=None, annotations=None, run_dimensions=None, - folder_name=None): + duration=None, + name=None): if description is not None: instance.description = description if activities is not None: @@ -553,8 +599,10 @@ def datafactory_pipeline_update(instance, instance.annotations = annotations if run_dimensions is not None: instance.run_dimensions = run_dimensions - if folder_name is not None: - instance.name_folder_name = folder_name + if duration is not None: + instance.elapsed_time_metric.duration = duration + if name is not None: + instance.folder.name = name return instance @@ -614,13 +662,15 @@ def datafactory_pipeline_run_query_by_factory(client, continuation_token=None, filters=None, order_by=None): + filter_parameters = {} + filter_parameters['continuation_token'] = continuation_token + filter_parameters['last_updated_after'] = last_updated_after + filter_parameters['last_updated_before'] = last_updated_before + filter_parameters['filters'] = filters + filter_parameters['order_by'] = order_by return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, - continuation_token_parameter=continuation_token, - last_updated_after=last_updated_after, - last_updated_before=last_updated_before, - filters=filters, - order_by=order_by) + filter_parameters=filter_parameters) def datafactory_activity_run_query_by_pipeline_run(client, @@ -632,14 +682,16 @@ def datafactory_activity_run_query_by_pipeline_run(client, continuation_token=None, filters=None, order_by=None): + filter_parameters = {} + filter_parameters['continuation_token'] = continuation_token + filter_parameters['last_updated_after'] = last_updated_after + filter_parameters['last_updated_before'] = last_updated_before + filter_parameters['filters'] = filters + filter_parameters['order_by'] = order_by return client.query_by_pipeline_run(resource_group_name=resource_group_name, factory_name=factory_name, run_id=run_id, - continuation_token_parameter=continuation_token, - last_updated_after=last_updated_after, - last_updated_before=last_updated_before, - filters=filters, - order_by=order_by) + filter_parameters=filter_parameters) def datafactory_trigger_list(client, @@ -666,11 +718,13 @@ def datafactory_trigger_create(client, trigger_name, properties, if_match=None): + trigger = {} + trigger['properties'] = properties return client.create_or_update(resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, if_match=if_match, - properties=properties) + trigger=trigger) def 
datafactory_trigger_update(instance, @@ -684,7 +738,7 @@ def datafactory_trigger_update(instance, instance.properties.description = description if annotations is not None: instance.properties.annotations = annotations - return instance.properties + return instance def datafactory_trigger_delete(client, @@ -710,10 +764,12 @@ def datafactory_trigger_query_by_factory(client, factory_name, continuation_token=None, parent_trigger_name=None): + filter_parameters = {} + filter_parameters['continuation_token'] = continuation_token + filter_parameters['parent_trigger_name'] = parent_trigger_name return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, - continuation_token_parameter=continuation_token, - parent_trigger_name=parent_trigger_name) + filter_parameters=filter_parameters) def datafactory_trigger_start(client, @@ -746,7 +802,7 @@ def datafactory_trigger_subscribe_to_event(client, trigger_name, no_wait=False): return sdk_no_wait(no_wait, - client.begin_subscribe_to_event, + client.begin_subscribe_to_events, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name) @@ -758,7 +814,7 @@ def datafactory_trigger_unsubscribe_from_event(client, trigger_name, no_wait=False): return sdk_no_wait(no_wait, - client.begin_unsubscribe_from_event, + client.begin_unsubscribe_from_events, resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name) @@ -783,13 +839,15 @@ def datafactory_trigger_run_query_by_factory(client, continuation_token=None, filters=None, order_by=None): + filter_parameters = {} + filter_parameters['continuation_token'] = continuation_token + filter_parameters['last_updated_after'] = last_updated_after + filter_parameters['last_updated_before'] = last_updated_before + filter_parameters['filters'] = filters + filter_parameters['order_by'] = order_by return client.query_by_factory(resource_group_name=resource_group_name, factory_name=factory_name, - continuation_token_parameter=continuation_token, - last_updated_after=last_updated_after, - last_updated_before=last_updated_before, - filters=filters, - order_by=order_by) + filter_parameters=filter_parameters) def datafactory_trigger_run_rerun(client, diff --git a/src/datafactory/azext_datafactory/tests/__init__.py b/src/datafactory/azext_datafactory/tests/__init__.py index 50e0627daff..70488e93851 100644 --- a/src/datafactory/azext_datafactory/tests/__init__.py +++ b/src/datafactory/azext_datafactory/tests/__init__.py @@ -31,8 +31,8 @@ def try_manual(func): def import_manual_function(origin_func): from importlib import import_module - decorated_path = inspect.getfile(origin_func) - module_path = __path__[0] + decorated_path = inspect.getfile(origin_func).lower() + module_path = __path__[0].lower() if not decorated_path.startswith(module_path): raise Exception("Decorator can only be used in submodules!") manual_path = os.path.join( @@ -46,7 +46,6 @@ def import_manual_function(origin_func): def get_func_to_call(): func_to_call = func try: - func_to_call = import_manual_function(func) func_to_call = import_manual_function(func) logger.info("Found manual override for %s(...)", func.__name__) except (ImportError, AttributeError): @@ -66,6 +65,9 @@ def wrapper(*args, **kwargs): ret = func_to_call(*args, **kwargs) except (AssertionError, AzureError, CliTestError, CliExecutionError, SystemExit, JMESPathCheckAssertionError) as e: + use_exception_cache = os.getenv("TEST_EXCEPTION_CACHE") + if use_exception_cache is None or 
use_exception_cache.lower() != "true": + raise test_map[func.__name__]["end_dt"] = dt.datetime.utcnow() test_map[func.__name__]["result"] = FAILED test_map[func.__name__]["error_message"] = str(e).replace("\r\n", " ").replace("\n", " ")[:500] diff --git a/src/datafactory/azext_datafactory/tests/latest/example_steps.py b/src/datafactory/azext_datafactory/tests/latest/example_steps.py new file mode 100644 index 00000000000..971dd167afb --- /dev/null +++ b/src/datafactory/azext_datafactory/tests/latest/example_steps.py @@ -0,0 +1,821 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + + +from .. import try_manual + + +# EXAMPLE: /Factories/put/Factories_CreateOrUpdate +@try_manual +def step_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory create ' + '--location "East US" ' + '--name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Factories/get/Factories_Get +@try_manual +def step_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory show ' + '--name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Factories/get/Factories_List +@try_manual +def step_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory list ' + '-g ""', + checks=checks) + + +# EXAMPLE: /Factories/get/Factories_ListByResourceGroup +@try_manual +def step_list2(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory list ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Factories/patch/Factories_Update +@try_manual +def step_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory update ' + '--name "{myFactory}" ' + '--tags exampleTag="exampleValue" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Factories/post/Factories_ConfigureFactoryRepo +@try_manual +def step_configure_factory_repo(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory configure-factory-repo ' + '--factory-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.DataFacto' + 'ry/factories/{myFactory}" ' + '--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" ' + 'project-name="project" repository-name="repo" root-folder="/" tenant-id="" ' + '--location "East US"', + checks=checks) + + +# EXAMPLE: /Factories/post/Factories_GetDataPlaneAccess +@try_manual +def step_get_data_plane_access(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory get-data-plane-access ' + '--name "{myFactory}" ' + '--access-resource-path "" ' + '--expire-time "2018-11-10T09:46:20.2659347Z" ' + '--permissions "r" ' + '--profile-name "DefaultProfile" ' + '--start-time "2018-11-10T02:46:20.2659347Z" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Factories/post/Factories_GetGitHubAccessToken +@try_manual +def step_get_git_hub_access_token(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory get-git-hub-access-token ' + 
'--name "{myFactory}" ' + '--git-hub-access-code "some" ' + '--git-hub-access-token-base-url "some" ' + '--git-hub-client-id "some" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /ActivityRuns/post/ActivityRuns_QueryByPipelineRun +@try_manual +def step_activity_run_query_by_pipeline_run(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory activity-run query-by-pipeline-run ' + '--factory-name "{myFactory}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', + checks=checks) + + +# EXAMPLE: /Datasets/put/Datasets_Create +@try_manual +def step_dataset_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset create ' + '--properties "{{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{{\\"type\\":\\"LinkedServiceReference' + '\\",\\"referenceName\\":\\"{myLinkedService}\\"}},\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Str' + 'ing\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"}}}},\\"typeProperties\\":{{\\"format\\":{{\\"type\\' + '":\\"TextFormat\\"}},\\"fileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"' + '}},\\"folderPath\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}}}}" ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Datasets/put/Datasets_Update +@try_manual +def step_dataset_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset update ' + '--description "Example description" ' + '--linked-service-name "{{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"{myLinkedService}' + '\\"}}" ' + '--parameters "{{\\"MyFileName\\":{{\\"type\\":\\"String\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"' + '}}}}" ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Datasets/get/Datasets_Get +@try_manual +def step_dataset_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset show ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Datasets/get/Datasets_ListByFactory +@try_manual +def step_dataset_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Datasets/delete/Datasets_Delete +@try_manual +def step_dataset_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory dataset delete -y ' + '--name "{myDataset}" ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/put/IntegrationRuntimes_Create +@try_manual +def step_integration_runtime_self_hosted_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime self-hosted create ' + '--factory-name "{myFactory}" ' + '--description "A selfhosted integration runtime" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_Get +@try_manual +def step_integration_runtime_show(test, rg, checks=None): + if checks is None: + checks = [] + 
test.cmd('az datafactory integration-runtime show ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_ListByFactory +@try_manual +def step_integration_runtime_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/patch/IntegrationRuntimes_Update +@try_manual +def step_integration_runtime_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime update ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}" ' + '--auto-update "Off" ' + '--update-delay-offset "\\"PT3H\\""', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_CreateLinkedIntegrationRuntime +@try_manual +def step_integration_runtime_linked(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime linked-integration-runtime create ' + '--name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" ' + '--location "West US" ' + '--data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" ' + '--subscription-id "061774c7-4b5a-4159-a55b-365581830283" ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetConnectionInfo +@try_manual +def step_integration_runtime_get_connection_info(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime get-connection-info ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetMonitoringData +@try_manual +def step_integration_runtime_get_monitoring_data(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime get-monitoring-data ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetStatus +@try_manual +def step_integration_runtime_get_status(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime get-status ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_ListAuthKeys +@try_manual +def step_integration_runtime_list_auth_key(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime list-auth-key ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_RegenerateAuthKey +@try_manual +def step_integration_runtime_regenerate_auth_key(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime regenerate-auth-key ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--key-name "authKey2" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Start +@try_manual +def 
step_integration_runtime_start(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime start ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime2}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Stop +@try_manual +def step_integration_runtime_stop(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime stop ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime2}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_SyncCredentials +@try_manual +def step_integration_runtime_sync_credentials(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime sync-credentials ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Upgrade +@try_manual +def step_integration_runtime_remove_link(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime remove-link ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--linked-factory-name "exampleFactoryName-linked" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimeNodes/get/IntegrationRuntimeNodes_Get +@try_manual +def step_integration_runtime_node_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime-node show ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimeNodes/patch/IntegrationRuntimeNodes_Update +@try_manual +def step_integration_runtime_node_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime-node update ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}" ' + '--concurrent-jobs-limit 2', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimeNodes/post/IntegrationRuntimeNodes_GetIpAddress +@try_manual +def step_integration_runtime_node_get_ip_address(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime-node get-ip-address ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimeNodes/delete/IntegrationRuntimesNodes_Delete +@try_manual +def step_integration_runtime_node_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime-node delete -y ' + '--factory-name "{myFactory}" ' + '--integration-runtime-name "{myIntegrationRuntime}" ' + '--node-name "Node_1" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /IntegrationRuntimes/delete/IntegrationRuntimes_Delete +@try_manual +def step_integration_runtime_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory integration-runtime delete -y ' + '--factory-name "{myFactory}" ' + '--name "{myIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/put/LinkedServices_Create +@try_manual +def 
step_linked_service_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service create ' + '--factory-name "{myFactory}" ' + '--properties "{{\\"type\\":\\"AzureStorage\\",\\"typeProperties\\":{{\\"connectionString\\":{{\\"type\\":' + '\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;Accoun' + 'tKey=\\"}}}}}}" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/put/LinkedServices_Update +@try_manual +def step_linked_service_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service update ' + '--factory-name "{myFactory}" ' + '--description "Example description" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/get/LinkedServices_Get +@try_manual +def step_linked_service_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service show ' + '--factory-name "{myFactory}" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/get/LinkedServices_ListByFactory +@try_manual +def step_linked_service_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /LinkedServices/delete/LinkedServices_Delete +@try_manual +def step_linked_service_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory linked-service delete -y ' + '--factory-name "{myFactory}" ' + '--name "{myLinkedService}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /PipelineRuns/get/PipelineRuns_Get +@try_manual +def step_pipeline_run_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline-run show ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', + checks=checks) + + +# EXAMPLE: /PipelineRuns/post/PipelineRuns_Cancel +@try_manual +def step_pipeline_run_cancel(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline-run cancel ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721"', + checks=checks) + + +# EXAMPLE: /PipelineRuns/post/PipelineRuns_QueryByFactory +@try_manual +def step_pipeline_run_query_by_factory(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline-run query-by-factory ' + '--factory-name "{myFactory}" ' + '--filters operand="PipelineName" operator="Equals" values="{myPipeline}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/put/Pipelines_Create +@try_manual +def step_pipeline_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline create ' + '--factory-name "{myFactory}" ' + '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typ' + 'eProperties\\":{{\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs' + '\\":[{{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",' + 
'\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"' + 'type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value' + '\\":\\"@item()\\"}},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],' + '\\"typeProperties\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\' + '":{{\\"type\\":\\"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",' + '\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}}}}],\\"parameters\\":{{\\"JobId\\":{{\\"ty' + 'pe\\":\\"String\\"}},\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}},\\"variables\\":{{\\"TestVaria' + 'bleArray\\":{{\\"type\\":\\"Array\\"}}}},\\"runDimensions\\":{{\\"JobId\\":{{\\"type\\":\\"Expression\\",' + '\\"value\\":\\"@pipeline().parameters.JobId\\"}}}},\\"duration\\":\\"0.00:10:00\\"}}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/put/Pipelines_Update +@try_manual +def step_pipeline_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline update ' + '--factory-name "{myFactory}" ' + '--description "Example description" ' + '--activities "[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{{' + '\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{{\\"type\\"' + ':\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\"' + ':\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"type\\":\\"Dataset' + 'Reference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@item()\\"}' + '},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"typeProperties' + '\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\":{{\\"type\\":\\' + '"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@p' + 'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" ' + '--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" ' + '--duration "0.00:10:00" ' + '--pipeline-name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/get/Pipelines_Get +@try_manual +def step_pipeline_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline show ' + '--factory-name "{myFactory}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/get/Pipelines_ListByFactory +@try_manual +def step_pipeline_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/post/Pipelines_CreateRun +@try_manual +def step_pipeline_create_run(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory pipeline create-run ' + '--factory-name "{myFactory}" ' + '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Pipelines/delete/Pipelines_Delete +@try_manual +def step_pipeline_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az 
datafactory pipeline delete -y ' + '--factory-name "{myFactory}" ' + '--name "{myPipeline}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Triggers/put/Triggers_Create +@try_manual +def step_trigger_create(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger create ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--properties "{{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{{\\"parameters\\":{{\\"OutputBlobNameL' + 'ist\\":[\\"exampleoutput.csv\\"]}},\\"pipelineReference\\":{{\\"type\\":\\"PipelineReference\\",\\"refere' + 'nceName\\":\\"{myPipeline}\\"}}}}],\\"typeProperties\\":{{\\"recurrence\\":{{\\"endTime\\":\\"2018-06-16T' + '00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13' + '.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}}}}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/put/Triggers_Update +@try_manual +def step_trigger_update(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger update ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--description "Example description" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/get/Triggers_Get +@try_manual +def step_trigger_show(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger show ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/get/Triggers_ListByFactory +@try_manual +def step_trigger_list(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger list ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_GetEventSubscriptionStatus +@try_manual +def step_trigger_get_event_subscription_status(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger get-event-subscription-status ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_QueryByFactory +@try_manual +def step_trigger_query_by_factory(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger query-by-factory ' + '--factory-name "{myFactory}" ' + '--parent-trigger-name "{myTrigger}" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_Start +@try_manual +def step_trigger_start(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger start ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_Stop +@try_manual +def step_trigger_stop(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger stop ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_SubscribeToEvents +@try_manual +def step_trigger_subscribe_to_event(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger subscribe-to-event ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/post/Triggers_UnsubscribeFromEvents +@try_manual +def step_trigger_unsubscribe_from_event(test, rg, 
checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger unsubscribe-from-event ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /TriggerRuns/post/TriggerRuns_QueryByFactory +@try_manual +def step_trigger_run_query_by_factory(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger-run query-by-factory ' + '--factory-name "{myFactory}" ' + '--filters operand="TriggerName" operator="Equals" values="{myTrigger}" ' + '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' + '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' + '--resource-group "{rg}"', + checks=checks) + + +# EXAMPLE: /TriggerRuns/post/Triggers_Cancel +@try_manual +def step_trigger_run_cancel(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger-run cancel ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" ' + '--trigger-name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /TriggerRuns/post/Triggers_Rerun +@try_manual +def step_trigger_run_rerun(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger-run rerun ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" ' + '--trigger-name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Triggers/delete/Triggers_Delete +@try_manual +def step_trigger_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory trigger delete -y ' + '--factory-name "{myFactory}" ' + '--resource-group "{rg}" ' + '--name "{myTrigger}"', + checks=checks) + + +# EXAMPLE: /Factories/delete/Factories_Delete +@try_manual +def step_delete(test, rg, checks=None): + if checks is None: + checks = [] + test.cmd('az datafactory delete -y ' + '--name "{myFactory}" ' + '--resource-group "{rg}"', + checks=checks) + diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py index bdc9fd88e23..b75ef5fb6d6 100644 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py +++ b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py @@ -10,745 +10,158 @@ import os from azure.cli.testsdk import ScenarioTest -from .. 
import try_manual, raise_if, calc_coverage from azure.cli.testsdk import ResourceGroupPreparer +from .example_steps import step_create +from .example_steps import step_update +from .example_steps import step_linked_service_create +from .example_steps import step_linked_service_update +from .example_steps import step_dataset_create +from .example_steps import step_dataset_update +from .example_steps import step_pipeline_create +from .example_steps import step_pipeline_update +from .example_steps import step_trigger_create +from .example_steps import step_trigger_update +from .example_steps import step_integration_runtime_self_hosted_create +from .example_steps import step_integration_runtime_update +from .example_steps import step_integration_runtime_linked +from .example_steps import step_pipeline_create_run +from .example_steps import step_integration_runtime_show +from .example_steps import step_linked_service_show +from .example_steps import step_pipeline_run_show +from .example_steps import step_pipeline_show +from .example_steps import step_dataset_show +from .example_steps import step_trigger_show +from .example_steps import step_integration_runtime_list +from .example_steps import step_linked_service_list +from .example_steps import step_pipeline_list +from .example_steps import step_trigger_list +from .example_steps import step_dataset_list +from .example_steps import step_show +from .example_steps import step_list2 +from .example_steps import step_list +from .example_steps import step_integration_runtime_regenerate_auth_key +from .example_steps import step_integration_runtime_get_connection_info +from .example_steps import step_integration_runtime_sync_credentials +from .example_steps import step_integration_runtime_get_monitoring_data +from .example_steps import step_integration_runtime_list_auth_key +from .example_steps import step_integration_runtime_remove_link +from .example_steps import step_integration_runtime_get_status +from .example_steps import step_integration_runtime_start +from .example_steps import step_integration_runtime_stop +from .example_steps import step_trigger_get_event_subscription_status +from .example_steps import step_activity_run_query_by_pipeline_run +from .example_steps import step_trigger_unsubscribe_from_event +from .example_steps import step_trigger_subscribe_to_event +from .example_steps import step_trigger_start +from .example_steps import step_trigger_stop +from .example_steps import step_get_git_hub_access_token +from .example_steps import step_get_data_plane_access +from .example_steps import step_pipeline_run_query_by_factory +from .example_steps import step_pipeline_run_cancel +from .example_steps import step_trigger_run_query_by_factory +from .example_steps import step_configure_factory_repo +from .example_steps import step_integration_runtime_delete +from .example_steps import step_trigger_delete +from .example_steps import step_pipeline_delete +from .example_steps import step_dataset_delete +from .example_steps import step_linked_service_delete +from .example_steps import step_delete +from .. 
import ( + try_manual, + raise_if, + calc_coverage +) TEST_DIR = os.path.abspath(os.path.join(os.path.abspath(__file__), '..')) +# Env setup_scenario @try_manual -def setup(test, rg): +def setup_scenario(test, rg): pass -# EXAMPLE: Factories_CreateOrUpdate +# Env cleanup_scenario @try_manual -def step_factories_createorupdate(test, rg): - test.cmd('az datafactory factory create ' - '--location "East US" ' - '--name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_Update -@try_manual -def step_factories_update(test, rg): - test.cmd('az datafactory factory update ' - '--name "{myFactory}" ' - '--tags exampleTag="exampleValue" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: LinkedServices_Create -@try_manual -def step_linkedservices_create(test, rg): - test.cmd('az datafactory linked-service create ' - '--factory-name "{myFactory}" ' - '--properties "{{\\"type\\":\\"AzureStorage\\",\\"typeProperties\\":{{\\"connectionString\\":{{\\"type\\":' - '\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;Accoun' - 'tKey=\\"}}}}}}" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: LinkedServices_Update -@try_manual -def step_linkedservices_update(test, rg): - test.cmd('az datafactory linked-service update ' - '--factory-name "{myFactory}" ' - '--description "Example description" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Datasets_Create -@try_manual -def step_datasets_create(test, rg): - test.cmd('az datafactory dataset create ' - '--properties "{{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{{\\"type\\":\\"LinkedServiceReference' - '\\",\\"referenceName\\":\\"{myLinkedService}\\"}},\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Str' - 'ing\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"}}}},\\"typeProperties\\":{{\\"format\\":{{\\"type\\' - '":\\"TextFormat\\"}},\\"fileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"' - '}},\\"folderPath\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}}}}" ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Datasets_Update -@try_manual -def step_datasets_update(test, rg): - test.cmd('az datafactory dataset update ' - '--description "Example description" ' - '--linked-service-name "{{\\"type\\":\\"LinkedServiceReference\\",\\"referenceName\\":\\"{myLinkedService}' - '\\"}}" ' - '--parameters "{{\\"MyFileName\\":{{\\"type\\":\\"String\\"}},\\"MyFolderPath\\":{{\\"type\\":\\"String\\"' - '}}}}" ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Pipelines_Create -@try_manual -def step_pipelines_create(test, rg): - test.cmd('az datafactory pipeline create ' - '--factory-name "{myFactory}" ' - '--pipeline "{{\\"activities\\":[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typ' - 'eProperties\\":{{\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs' - '\\":[{{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",' - '\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"' - 'type\\":\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value' - 
'\\":\\"@item()\\"}},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],' - '\\"typeProperties\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\' - '":{{\\"type\\":\\"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",' - '\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}}}}],\\"parameters\\":{{\\"JobId\\":{{\\"ty' - 'pe\\":\\"String\\"}},\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}},\\"variables\\":{{\\"TestVaria' - 'bleArray\\":{{\\"type\\":\\"Array\\"}}}},\\"runDimensions\\":{{\\"JobId\\":{{\\"type\\":\\"Expression\\",' - '\\"value\\":\\"@pipeline().parameters.JobId\\"}}}}}}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Pipelines_Update -@try_manual -def step_pipelines_update(test, rg): - test.cmd('az datafactory pipeline update ' - '--factory-name "{myFactory}" ' - '--description "Example description" ' - '--activities "[{{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{{' - '\\"activities\\":[{{\\"name\\":\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{{\\"type\\"' - ':\\"DatasetReference\\",\\"parameters\\":{{\\"MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\"' - ':\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"outputs\\":[{{\\"type\\":\\"Dataset' - 'Reference\\",\\"parameters\\":{{\\"MyFileName\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@item()\\"}' - '},\\"MyFolderPath\\":\\"examplecontainer\\"}},\\"referenceName\\":\\"{myDataset}\\"}}],\\"typeProperties' - '\\":{{\\"dataIntegrationUnits\\":32,\\"sink\\":{{\\"type\\":\\"BlobSink\\"}},\\"source\\":{{\\"type\\":\\' - '"BlobSource\\"}}}}}}],\\"isSequential\\":true,\\"items\\":{{\\"type\\":\\"Expression\\",\\"value\\":\\"@p' - 'ipeline().parameters.OutputBlobNameList\\"}}}}}}]" ' - '--parameters "{{\\"OutputBlobNameList\\":{{\\"type\\":\\"Array\\"}}}}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Triggers_Create -@try_manual -def step_triggers_create(test, rg): - test.cmd('az datafactory trigger create ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--properties "{{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{{\\"parameters\\":{{\\"OutputBlobNameL' - 'ist\\":[\\"exampleoutput.csv\\"]}},\\"pipelineReference\\":{{\\"type\\":\\"PipelineReference\\",\\"refere' - 'nceName\\":\\"{myPipeline}\\"}}}}],\\"typeProperties\\":{{\\"recurrence\\":{{\\"endTime\\":\\"2018-06-16T' - '00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13' - '.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}}}}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Triggers_Update -@try_manual -def step_triggers_update(test, rg): - test.cmd('az datafactory trigger update ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--description "Example description" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Create -@try_manual -def step_integrationruntimes_create(test, rg): - test.cmd('az datafactory integration-runtime self-hosted create ' - '--factory-name "{myFactory}" ' - '--description "A selfhosted integration runtime" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Update -@try_manual -def step_integrationruntimes_update(test, rg): - test.cmd('az datafactory integration-runtime update 
' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}" ' - '--auto-update "Off" ' - '--update-delay-offset "\\"PT3H\\""', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_CreateLinkedIntegrationRuntime -@try_manual -def step_integrationruntimes_createlinkedintegrationru(test, rg): - test.cmd('az datafactory integration-runtime linked-integration-runtime create ' - '--name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" ' - '--location "West US" ' - '--data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" ' - '--subscription-id "061774c7-4b5a-4159-a55b-365581830283" ' - '--factory-name "{myFactory}" ' - '--integration-runtime-name "{myIntegrationRuntime}" ' - '--resource-group "{rg}" ' - '--subscription-id "12345678-1234-1234-1234-12345678abc"', - checks=[]) - - -# EXAMPLE: Pipelines_CreateRun -@try_manual -def step_pipelines_createrun(test, rg): - test.cmd('az datafactory pipeline create-run ' - '--factory-name "{myFactory}" ' - '--parameters "{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Get -@try_manual -def step_integrationruntimes_get(test, rg): - test.cmd('az datafactory integration-runtime show ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: RerunTriggers_ListByTrigger -@try_manual -def step_reruntriggers_listbytrigger(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: LinkedServices_Get -@try_manual -def step_linkedservices_get(test, rg): - test.cmd('az datafactory linked-service show ' - '--factory-name "{myFactory}" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: PipelineRuns_Get -@try_manual -def step_pipelineruns_get(test, rg): - test.cmd('az datafactory pipeline-run show ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', - checks=[]) - - -# EXAMPLE: Pipelines_Get -@try_manual -def step_pipelines_get(test, rg): - test.cmd('az datafactory pipeline show ' - '--factory-name "{myFactory}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Datasets_Get -@try_manual -def step_datasets_get(test, rg): - test.cmd('az datafactory dataset show ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Triggers_Get -@try_manual -def step_triggers_get(test, rg): - test.cmd('az datafactory trigger show ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_ListByFactory -@try_manual -def step_integrationruntimes_listbyfactory(test, rg): - test.cmd('az datafactory integration-runtime list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: LinkedServices_ListByFactory -@try_manual -def step_linkedservices_listbyfactory(test, rg): - test.cmd('az datafactory linked-service list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Pipelines_ListByFactory -@try_manual -def step_pipelines_listbyfactory(test, rg): - test.cmd('az datafactory pipeline list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Triggers_ListByFactory -@try_manual -def step_triggers_listbyfactory(test, rg): - test.cmd('az datafactory trigger list ' - '--factory-name 
"{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Datasets_ListByFactory -@try_manual -def step_datasets_listbyfactory(test, rg): - test.cmd('az datafactory dataset list ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_Get -@try_manual -def step_factories_get(test, rg): - test.cmd('az datafactory factory show ' - '--name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_ListByResourceGroup -@try_manual -def step_factories_listbyresourcegroup(test, rg): - test.cmd('az datafactory factory list ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_List -@try_manual -def step_factories_list(test, rg): - test.cmd('az datafactory factory list ' - '-g ""', - checks=[]) - - -# EXAMPLE: Operations_List -@try_manual -def step_operations_list(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: RerunTriggers_Cancel -@try_manual -def step_reruntriggers_cancel(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: RerunTriggers_Start -@try_manual -def step_reruntriggers_start(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: RerunTriggers_Stop -@try_manual -def step_reruntriggers_stop(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: IntegrationRuntimes_RegenerateAuthKey -@try_manual -def step_integrationruntimes_regenerateauthkey(test, rg): - test.cmd('az datafactory integration-runtime regenerate-auth-key ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--key-name "authKey2" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: TriggerRuns_Rerun -@try_manual -def step_triggerruns_rerun(test, rg): - # EXAMPLE NOT FOUND! - pass - - -# EXAMPLE: IntegrationRuntimes_GetConnectionInfo -@try_manual -def step_integrationruntimes_getconnectioninfo(test, rg): - test.cmd('az datafactory integration-runtime get-connection-info ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_SyncCredentials -@try_manual -def step_integrationruntimes_synccredentials(test, rg): - test.cmd('az datafactory integration-runtime sync-credentials ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_GetMonitoringData -@try_manual -def step_integrationruntimes_getmonitoringdata(test, rg): - test.cmd('az datafactory integration-runtime get-monitoring-data ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_ListAuthKeys -@try_manual -def step_integrationruntimes_listauthkeys(test, rg): - test.cmd('az datafactory integration-runtime list-auth-key ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Upgrade -@try_manual -def step_integrationruntimes_upgrade(test, rg): - test.cmd('az datafactory integration-runtime remove-link ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--linked-factory-name "exampleFactoryName-linked" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_GetStatus -@try_manual -def step_integrationruntimes_getstatus(test, rg): - test.cmd('az datafactory integration-runtime get-status ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - 
-# EXAMPLE: IntegrationRuntimes_Start -@try_manual -def step_integrationruntimes_start(test, rg): - test.cmd('az datafactory integration-runtime start ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime2}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Stop -@try_manual -def step_integrationruntimes_stop(test, rg): - test.cmd('az datafactory integration-runtime stop ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime2}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Triggers_GetEventSubscriptionStatus -@try_manual -def step_triggers_geteventsubscriptionstatus(test, rg): - test.cmd('az datafactory trigger get-event-subscription-status ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: ActivityRuns_QueryByPipelineRun -@try_manual -def step_activityruns_querybypipelinerun(test, rg): - test.cmd('az datafactory activity-run query-by-pipeline-run ' - '--factory-name "{myFactory}" ' - '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' - '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' - '--resource-group "{rg}" ' - '--run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b"', - checks=[]) - - -# EXAMPLE: Triggers_UnsubscribeFromEvents -@try_manual -def step_triggers_unsubscribefromevents(test, rg): - test.cmd('az datafactory trigger unsubscribe-from-event ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Triggers_SubscribeToEvents -@try_manual -def step_triggers_subscribetoevents(test, rg): - test.cmd('az datafactory trigger subscribe-to-event ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Triggers_Start -@try_manual -def step_triggers_start(test, rg): - test.cmd('az datafactory trigger start ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Triggers_Stop -@try_manual -def step_triggers_stop(test, rg): - test.cmd('az datafactory trigger stop ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Factories_GetGitHubAccessToken -@try_manual -def step_factories_getgithubaccesstoken(test, rg): - test.cmd('az datafactory factory get-git-hub-access-token ' - '--name "{myFactory}" ' - '--git-hub-access-code "some" ' - '--git-hub-access-token-base-url "some" ' - '--git-hub-client-id "some" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_GetDataPlaneAccess -@try_manual -def step_factories_getdataplaneaccess(test, rg): - test.cmd('az datafactory factory get-data-plane-access ' - '--name "{myFactory}" ' - '--access-resource-path "" ' - '--expire-time "2018-11-10T09:46:20.2659347Z" ' - '--permissions "r" ' - '--profile-name "DefaultProfile" ' - '--start-time "2018-11-10T02:46:20.2659347Z" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: PipelineRuns_QueryByFactory -@try_manual -def step_pipelineruns_querybyfactory(test, rg): - test.cmd('az datafactory pipeline-run query-by-factory ' - '--factory-name "{myFactory}" ' - '--filters operand="PipelineName" operator="Equals" values="{myPipeline}" ' - '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' - '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: PipelineRuns_Cancel -@try_manual -def step_pipelineruns_cancel(test, rg): - test.cmd('az 
datafactory pipeline-run cancel ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721"', - checks=[]) - - -# EXAMPLE: TriggerRuns_QueryByFactory -@try_manual -def step_triggerruns_querybyfactory(test, rg): - test.cmd('az datafactory trigger-run query-by-factory ' - '--factory-name "{myFactory}" ' - '--filters operand="TriggerName" operator="Equals" values="{myTrigger}" ' - '--last-updated-after "2018-06-16T00:36:44.3345758Z" ' - '--last-updated-before "2018-06-16T00:49:48.3686473Z" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_ConfigureFactoryRepo -@try_manual -def step_factories_configurefactoryrepo(test, rg): - test.cmd('az datafactory factory configure-factory-repo ' - '--factory-resource-id "/subscriptions/{subscription_id}/resourceGroups/{rg}/providers/Microsoft.DataFacto' - 'ry/factories/{myFactory}" ' - '--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" ' - 'project-name="project" repository-name="repo" root-folder="/" tenant-id="" ' - '--location "East US"', - checks=[]) - - -# EXAMPLE: IntegrationRuntimes_Delete -@try_manual -def step_integrationruntimes_delete(test, rg): - test.cmd('az datafactory integration-runtime delete -y ' - '--factory-name "{myFactory}" ' - '--name "{myIntegrationRuntime}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Triggers_Delete -@try_manual -def step_triggers_delete(test, rg): - test.cmd('az datafactory trigger delete -y ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}" ' - '--name "{myTrigger}"', - checks=[]) - - -# EXAMPLE: Pipelines_Delete -@try_manual -def step_pipelines_delete(test, rg): - test.cmd('az datafactory pipeline delete -y ' - '--factory-name "{myFactory}" ' - '--name "{myPipeline}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Datasets_Delete -@try_manual -def step_datasets_delete(test, rg): - test.cmd('az datafactory dataset delete -y ' - '--name "{myDataset}" ' - '--factory-name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: LinkedServices_Delete -@try_manual -def step_linkedservices_delete(test, rg): - test.cmd('az datafactory linked-service delete -y ' - '--factory-name "{myFactory}" ' - '--name "{myLinkedService}" ' - '--resource-group "{rg}"', - checks=[]) - - -# EXAMPLE: Factories_Delete -@try_manual -def step_factories_delete(test, rg): - test.cmd('az datafactory factory delete -y ' - '--name "{myFactory}" ' - '--resource-group "{rg}"', - checks=[]) - - -@try_manual -def cleanup(test, rg): +def cleanup_scenario(test, rg): pass +# Testcase: Scenario @try_manual def call_scenario(test, rg): - setup(test, rg) - step_factories_createorupdate(test, rg) - step_factories_update(test, rg) - step_linkedservices_create(test, rg) - step_linkedservices_update(test, rg) - step_datasets_create(test, rg) - step_datasets_update(test, rg) - step_pipelines_create(test, rg) - step_pipelines_update(test, rg) - step_triggers_create(test, rg) - step_triggers_update(test, rg) - step_integrationruntimes_create(test, rg) - step_integrationruntimes_update(test, rg) - step_integrationruntimes_createlinkedintegrationru(test, rg) - step_pipelines_createrun(test, rg) - step_integrationruntimes_get(test, rg) - step_reruntriggers_listbytrigger(test, rg) - step_linkedservices_get(test, rg) - step_pipelineruns_get(test, rg) - step_pipelines_get(test, rg) - step_datasets_get(test, rg) - step_triggers_get(test, rg) - step_integrationruntimes_listbyfactory(test, rg) - 
step_linkedservices_listbyfactory(test, rg) - step_pipelines_listbyfactory(test, rg) - step_triggers_listbyfactory(test, rg) - step_datasets_listbyfactory(test, rg) - step_factories_get(test, rg) - step_factories_listbyresourcegroup(test, rg) - step_factories_list(test, rg) - step_operations_list(test, rg) - step_reruntriggers_cancel(test, rg) - step_reruntriggers_start(test, rg) - step_reruntriggers_stop(test, rg) - step_integrationruntimes_regenerateauthkey(test, rg) - step_triggerruns_rerun(test, rg) - step_integrationruntimes_getconnectioninfo(test, rg) - step_integrationruntimes_synccredentials(test, rg) - step_integrationruntimes_getmonitoringdata(test, rg) - step_integrationruntimes_listauthkeys(test, rg) - step_integrationruntimes_upgrade(test, rg) - step_integrationruntimes_getstatus(test, rg) - step_integrationruntimes_start(test, rg) - step_integrationruntimes_stop(test, rg) - step_triggers_geteventsubscriptionstatus(test, rg) - step_activityruns_querybypipelinerun(test, rg) - step_triggers_unsubscribefromevents(test, rg) - step_triggers_subscribetoevents(test, rg) - step_triggers_start(test, rg) - step_triggers_stop(test, rg) - step_factories_getgithubaccesstoken(test, rg) - step_factories_getdataplaneaccess(test, rg) - step_pipelineruns_querybyfactory(test, rg) - step_pipelineruns_cancel(test, rg) - step_triggerruns_querybyfactory(test, rg) - step_factories_configurefactoryrepo(test, rg) - step_integrationruntimes_delete(test, rg) - step_triggers_delete(test, rg) - step_pipelines_delete(test, rg) - step_datasets_delete(test, rg) - step_linkedservices_delete(test, rg) - step_factories_delete(test, rg) - cleanup(test, rg) - - -@try_manual -class DataFactoryManagementClientScenarioTest(ScenarioTest): - - @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') - def test_datafactory(self, rg): - + setup_scenario(test, rg) + step_create(test, rg, checks=[]) + step_update(test, rg, checks=[]) + step_linked_service_create(test, rg, checks=[]) + step_linked_service_update(test, rg, checks=[]) + step_dataset_create(test, rg, checks=[]) + step_dataset_update(test, rg, checks=[]) + step_pipeline_create(test, rg, checks=[]) + step_pipeline_update(test, rg, checks=[]) + step_trigger_create(test, rg, checks=[]) + step_trigger_update(test, rg, checks=[]) + step_integration_runtime_self_hosted_create(test, rg, checks=[]) + step_integration_runtime_update(test, rg, checks=[]) + step_integration_runtime_linked(test, rg, checks=[]) + step_pipeline_create_run(test, rg, checks=[]) + step_integration_runtime_show(test, rg, checks=[]) + # STEP NOT FOUND: RerunTriggers_ListByTrigger + step_linked_service_show(test, rg, checks=[]) + step_pipeline_run_show(test, rg, checks=[]) + step_pipeline_show(test, rg, checks=[]) + step_dataset_show(test, rg, checks=[]) + step_trigger_show(test, rg, checks=[]) + step_integration_runtime_list(test, rg, checks=[]) + step_linked_service_list(test, rg, checks=[]) + step_pipeline_list(test, rg, checks=[]) + step_trigger_list(test, rg, checks=[]) + step_dataset_list(test, rg, checks=[]) + step_show(test, rg, checks=[]) + step_list2(test, rg, checks=[]) + step_list(test, rg, checks=[]) + # STEP NOT FOUND: Operations_List + # STEP NOT FOUND: RerunTriggers_Cancel + # STEP NOT FOUND: RerunTriggers_Start + # STEP NOT FOUND: RerunTriggers_Stop + step_integration_runtime_regenerate_auth_key(test, rg, checks=[]) + # STEP NOT FOUND: TriggerRuns_Rerun + step_integration_runtime_get_connection_info(test, rg, checks=[]) + 
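# --- Illustrative sketch (not part of the generated diff) ---
# The regenerated step_* helpers invoked above are assumed to share one shape:
# each accepts an optional `checks` list and wraps a single test.cmd() call,
# mirroring the old step_factories_* helpers removed earlier in this file.
# The helper name and CLI arguments below are an assumption for illustration only.
@try_manual
def step_create_sketch(test, rg, checks=None):  # hypothetical helper name
    if checks is None:
        checks = []
    return test.cmd('az datafactory create '
                    '--location "East US" '
                    '--name "{myFactory}" '
                    '--resource-group "{rg}"',
                    checks=checks)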
step_integration_runtime_sync_credentials(test, rg, checks=[]) + step_integration_runtime_get_monitoring_data(test, rg, checks=[]) + step_integration_runtime_list_auth_key(test, rg, checks=[]) + step_integration_runtime_remove_link(test, rg, checks=[]) + step_integration_runtime_get_status(test, rg, checks=[]) + step_integration_runtime_start(test, rg, checks=[]) + step_integration_runtime_stop(test, rg, checks=[]) + step_trigger_get_event_subscription_status(test, rg, checks=[]) + step_activity_run_query_by_pipeline_run(test, rg, checks=[]) + step_trigger_unsubscribe_from_event(test, rg, checks=[]) + step_trigger_subscribe_to_event(test, rg, checks=[]) + step_trigger_start(test, rg, checks=[]) + step_trigger_stop(test, rg, checks=[]) + step_get_git_hub_access_token(test, rg, checks=[]) + step_get_data_plane_access(test, rg, checks=[]) + step_pipeline_run_query_by_factory(test, rg, checks=[]) + step_pipeline_run_cancel(test, rg, checks=[]) + step_trigger_run_query_by_factory(test, rg, checks=[]) + step_configure_factory_repo(test, rg, checks=[]) + step_integration_runtime_delete(test, rg, checks=[]) + step_trigger_delete(test, rg, checks=[]) + step_pipeline_delete(test, rg, checks=[]) + step_dataset_delete(test, rg, checks=[]) + step_linked_service_delete(test, rg, checks=[]) + step_delete(test, rg, checks=[]) + cleanup_scenario(test, rg) + + +# Test class for Scenario +@try_manual +class DatafactoryScenarioTest(ScenarioTest): + + def __init__(self, *args, **kwargs): + super(DatafactoryScenarioTest, self).__init__(*args, **kwargs) self.kwargs.update({ 'subscription_id': self.get_subscription_id() }) @@ -763,6 +176,10 @@ def test_datafactory(self, rg): 'myTrigger': self.create_random_name(prefix='exampleTrigger'[:7], length=14), }) + + @ResourceGroupPreparer(name_prefix='clitestdatafactory_exampleResourceGroup'[:7], key='rg', parameter_name='rg') + def test_datafactory_Scenario(self, rg): call_scenario(self, rg) calc_coverage(__file__) raise_if() + diff --git a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md b/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md deleted file mode 100644 index 9f0a5f555cf..00000000000 --- a/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario_coverage.md +++ /dev/null @@ -1,56 +0,0 @@ -|Scenario|Result|ErrorMessage|ErrorStack|ErrorNormalized|StartDt|EndDt| -|step_factories_createorupdate|successed||||2020-09-09 03:20:36.459026|2020-09-09 03:20:47.009536| -|step_factories_update|successed||||2020-09-09 03:13:09.171999|2020-09-09 03:13:17.158415| -|step_linkedservices_create|successed||||2020-09-09 03:13:17.158521|2020-09-09 03:13:18.391822| -|step_linkedservices_update|successed||||2020-09-09 03:13:18.391936|2020-09-09 03:13:20.086697| -|step_datasets_create|successed||||2020-09-09 03:13:20.086867|2020-09-09 03:13:21.030817| -|step_datasets_update|successed||||2020-09-09 03:13:21.030945|2020-09-09 03:13:23.212312| -|step_pipelines_create|successed||||2020-09-09 03:13:23.212418|2020-09-09 03:13:24.144046| -|step_pipelines_update|successed||||2020-09-09 03:13:24.144222|2020-09-09 03:13:25.633789| -|step_triggers_create|successed||||2020-09-09 03:13:25.633898|2020-09-09 03:13:26.656790| -|step_triggers_update|successed||||2020-09-09 03:13:26.656960|2020-09-09 03:13:28.908454| -|step_integrationruntimes_create|successed||||2020-09-09 03:13:28.908624|2020-09-09 03:13:30.048626| -|step_integrationruntimes_update|successed||||2020-09-09 03:13:30.048749|2020-09-09 
03:13:30.718810| -|step_pipelines_createrun|successed||||2020-09-09 03:20:58.002857|2020-09-09 03:20:59.633723| -|step_integrationruntimes_get|successed||||2020-09-09 03:14:47.710770|2020-09-09 03:14:48.302708| -|step_reruntriggers_listbytrigger|successed||||2020-09-09 03:13:55.851345|2020-09-09 03:13:55.851351| -|step_linkedservices_get|successed||||2020-09-09 03:13:55.851669|2020-09-09 03:13:56.412982| -|step_pipelines_get|successed||||2020-09-09 03:13:56.413198|2020-09-09 03:13:57.000576| -|step_datasets_get|successed||||2020-09-09 03:13:57.000711|2020-09-09 03:13:57.627004| -|step_triggers_get|successed||||2020-09-09 03:21:05.240434|2020-09-09 03:21:05.843100| -|step_integrationruntimes_listbyfactory|successed||||2020-09-09 03:13:58.229772|2020-09-09 03:13:58.825227| -|step_linkedservices_listbyfactory|successed||||2020-09-09 03:13:58.825356|2020-09-09 03:13:59.474421| -|step_pipelines_listbyfactory|successed||||2020-09-09 03:13:59.474573|2020-09-09 03:14:00.075676| -|step_triggers_listbyfactory|successed||||2020-09-09 03:14:00.075778|2020-09-09 03:14:00.659819| -|step_datasets_listbyfactory|successed||||2020-09-09 03:14:00.659993|2020-09-09 03:14:01.933289| -|step_factories_get|successed||||2020-09-09 03:14:01.933391|2020-09-09 03:14:02.496092| -|step_factories_listbyresourcegroup|successed||||2020-09-09 03:14:02.496256|2020-09-09 03:14:03.065882| -|step_factories_list|successed||||2020-09-09 03:14:03.066110|2020-09-09 03:14:03.652226| -|step_operations_list|successed||||2020-09-09 03:14:03.652474|2020-09-09 03:14:03.652478| -|step_integrationruntimes_regenerateauthkey|successed||||2020-09-09 03:14:03.652652|2020-09-09 03:14:04.485017| -|step_integrationruntimes_synccredentials|successed||||2020-09-09 03:14:04.485147|2020-09-09 03:14:05.267252| -|step_integrationruntimes_getmonitoringdata|successed||||2020-09-09 03:14:05.267586|2020-09-09 03:14:05.885271| -|step_integrationruntimes_listauthkeys|successed||||2020-09-09 03:14:05.885388|2020-09-09 03:14:06.561146| -|step_integrationruntimes_upgrade|successed||||2020-09-09 03:14:06.561314|2020-09-09 03:14:07.329602| -|step_integrationruntimes_getstatus|successed||||2020-09-09 03:14:07.329744|2020-09-09 03:14:07.953859| -|step_triggers_geteventsubscriptionstatus|successed||||2020-09-09 03:14:07.953969|2020-09-09 03:14:08.601782| -|step_triggers_unsubscribefromevents|successed||||2020-09-09 03:14:08.601956|2020-09-09 03:14:09.397017| -|step_triggers_subscribetoevents|successed||||2020-09-09 03:14:09.397108|2020-09-09 03:14:10.182324| -|step_triggers_start|successed||||2020-09-09 03:21:02.232388|2020-09-09 03:21:05.240220| -|step_triggers_stop|successed||||2020-09-09 03:26:10.167791|2020-09-09 03:26:12.927439| -|step_factories_getdataplaneaccess|successed||||2020-09-09 03:14:15.489956|2020-09-09 03:14:16.213205| -|step_triggerruns_querybyfactory|successed||||2020-09-09 03:26:09.523472|2020-09-09 03:26:10.167619| -|step_factories_configurefactoryrepo|successed||||2020-09-09 03:14:16.845877|2020-09-09 03:14:19.093687| -|step_integrationruntimes_delete|successed||||2020-09-09 03:20:29.384647|2020-09-09 03:20:31.116770| -|step_triggers_delete|successed||||2020-09-09 03:26:12.927659|2020-09-09 03:26:17.287524| -|step_pipelines_delete|successed||||2020-09-09 03:26:17.287730|2020-09-09 03:26:21.728041| -|step_datasets_delete|successed||||2020-09-09 03:14:27.023092|2020-09-09 03:14:28.558416| -|step_linkedservices_delete|successed||||2020-09-09 03:14:28.558520|2020-09-09 03:14:29.608796| -|step_factories_delete|successed||||2020-09-09 
03:26:21.728199|2020-09-09 03:26:28.588917| -|step_integrationruntimes_start|successed||||2020-09-09 03:14:48.302813|2020-09-09 03:16:06.820258| -|step_integrationruntimes_stop|successed||||2020-09-09 03:16:06.820543|2020-09-09 03:20:29.384268| -|step_pipelineruns_get|successed||||2020-09-09 03:20:55.257577|2020-09-09 03:20:56.603108| -|step_activityruns_querybypipelinerun|successed||||2020-09-09 03:20:56.603386|2020-09-09 03:20:58.002421| -|step_pipelineruns_cancel|successed||||2020-09-09 03:20:59.634195|2020-09-09 03:21:00.580469| -|step_triggerruns_rerun|successed||||2020-09-09 03:26:07.876793|2020-09-09 03:26:09.523263| -Coverage: 54/54 diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py index 84eac8676c6..3e3cbab9738 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_configuration.py @@ -48,8 +48,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id self.api_version = "2018-06-01" - self.credential_scopes = ['https://management.azure.com/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py index ab18f1f6cb4..9f90ad49d4a 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_data_factory_management_client.py @@ -18,63 +18,63 @@ from azure.core.credentials import TokenCredential from ._configuration import DataFactoryManagementClientConfiguration -from .operations import OperationOperations -from .operations import FactoryOperations +from .operations import Operations +from .operations import FactoriesOperations from .operations import ExposureControlOperations -from .operations import IntegrationRuntimeOperations +from .operations import IntegrationRuntimesOperations from .operations import IntegrationRuntimeObjectMetadataOperations -from .operations import IntegrationRuntimeNodeOperations -from .operations import LinkedServiceOperations -from .operations import DatasetOperations -from .operations import PipelineOperations -from .operations import PipelineRunOperations -from .operations import ActivityRunOperations -from .operations import TriggerOperations -from .operations import TriggerRunOperations -from .operations import DataFlowOperations +from .operations import IntegrationRuntimeNodesOperations +from .operations import LinkedServicesOperations +from .operations import DatasetsOperations +from .operations import PipelinesOperations +from .operations import PipelineRunsOperations +from .operations import ActivityRunsOperations +from .operations import TriggersOperations +from .operations import TriggerRunsOperations +from .operations import DataFlowsOperations from .operations import DataFlowDebugSessionOperations -from .operations import ManagedVirtualNetworkOperations -from .operations import ManagedPrivateEndpointOperations +from .operations import 
ManagedVirtualNetworksOperations +from .operations import ManagedPrivateEndpointsOperations from . import models class DataFactoryManagementClient(object): """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. - :ivar operation: OperationOperations operations - :vartype operation: data_factory_management_client.operations.OperationOperations - :ivar factory: FactoryOperations operations - :vartype factory: data_factory_management_client.operations.FactoryOperations + :ivar operations: Operations operations + :vartype operations: data_factory_management_client.operations.Operations + :ivar factories: FactoriesOperations operations + :vartype factories: data_factory_management_client.operations.FactoriesOperations :ivar exposure_control: ExposureControlOperations operations :vartype exposure_control: data_factory_management_client.operations.ExposureControlOperations - :ivar integration_runtime: IntegrationRuntimeOperations operations - :vartype integration_runtime: data_factory_management_client.operations.IntegrationRuntimeOperations + :ivar integration_runtimes: IntegrationRuntimesOperations operations + :vartype integration_runtimes: data_factory_management_client.operations.IntegrationRuntimesOperations :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations :vartype integration_runtime_object_metadata: data_factory_management_client.operations.IntegrationRuntimeObjectMetadataOperations - :ivar integration_runtime_node: IntegrationRuntimeNodeOperations operations - :vartype integration_runtime_node: data_factory_management_client.operations.IntegrationRuntimeNodeOperations - :ivar linked_service: LinkedServiceOperations operations - :vartype linked_service: data_factory_management_client.operations.LinkedServiceOperations - :ivar dataset: DatasetOperations operations - :vartype dataset: data_factory_management_client.operations.DatasetOperations - :ivar pipeline: PipelineOperations operations - :vartype pipeline: data_factory_management_client.operations.PipelineOperations - :ivar pipeline_run: PipelineRunOperations operations - :vartype pipeline_run: data_factory_management_client.operations.PipelineRunOperations - :ivar activity_run: ActivityRunOperations operations - :vartype activity_run: data_factory_management_client.operations.ActivityRunOperations - :ivar trigger: TriggerOperations operations - :vartype trigger: data_factory_management_client.operations.TriggerOperations - :ivar trigger_run: TriggerRunOperations operations - :vartype trigger_run: data_factory_management_client.operations.TriggerRunOperations - :ivar data_flow: DataFlowOperations operations - :vartype data_flow: data_factory_management_client.operations.DataFlowOperations + :ivar integration_runtime_nodes: IntegrationRuntimeNodesOperations operations + :vartype integration_runtime_nodes: data_factory_management_client.operations.IntegrationRuntimeNodesOperations + :ivar linked_services: LinkedServicesOperations operations + :vartype linked_services: data_factory_management_client.operations.LinkedServicesOperations + :ivar datasets: DatasetsOperations operations + :vartype datasets: data_factory_management_client.operations.DatasetsOperations + :ivar pipelines: PipelinesOperations operations + :vartype pipelines: data_factory_management_client.operations.PipelinesOperations + :ivar pipeline_runs: PipelineRunsOperations operations + :vartype pipeline_runs: 
data_factory_management_client.operations.PipelineRunsOperations + :ivar activity_runs: ActivityRunsOperations operations + :vartype activity_runs: data_factory_management_client.operations.ActivityRunsOperations + :ivar triggers: TriggersOperations operations + :vartype triggers: data_factory_management_client.operations.TriggersOperations + :ivar trigger_runs: TriggerRunsOperations operations + :vartype trigger_runs: data_factory_management_client.operations.TriggerRunsOperations + :ivar data_flows: DataFlowsOperations operations + :vartype data_flows: data_factory_management_client.operations.DataFlowsOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations :vartype data_flow_debug_session: data_factory_management_client.operations.DataFlowDebugSessionOperations - :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations - :vartype managed_virtual_network: data_factory_management_client.operations.ManagedVirtualNetworkOperations - :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations - :vartype managed_private_endpoint: data_factory_management_client.operations.ManagedPrivateEndpointOperations + :ivar managed_virtual_networks: ManagedVirtualNetworksOperations operations + :vartype managed_virtual_networks: data_factory_management_client.operations.ManagedVirtualNetworksOperations + :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations + :vartype managed_private_endpoints: data_factory_management_client.operations.ManagedPrivateEndpointsOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The subscription identifier. @@ -100,39 +100,39 @@ def __init__( self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) - self.operation = OperationOperations( + self.operations = Operations( self._client, self._config, self._serialize, self._deserialize) - self.factory = FactoryOperations( + self.factories = FactoriesOperations( self._client, self._config, self._serialize, self._deserialize) self.exposure_control = ExposureControlOperations( self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime = IntegrationRuntimeOperations( + self.integration_runtimes = IntegrationRuntimesOperations( self._client, self._config, self._serialize, self._deserialize) self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations( self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_node = IntegrationRuntimeNodeOperations( + self.integration_runtime_nodes = IntegrationRuntimeNodesOperations( self._client, self._config, self._serialize, self._deserialize) - self.linked_service = LinkedServiceOperations( + self.linked_services = LinkedServicesOperations( self._client, self._config, self._serialize, self._deserialize) - self.dataset = DatasetOperations( + self.datasets = DatasetsOperations( self._client, self._config, self._serialize, self._deserialize) - self.pipeline = PipelineOperations( + self.pipelines = PipelinesOperations( self._client, self._config, self._serialize, self._deserialize) - self.pipeline_run = PipelineRunOperations( + self.pipeline_runs = PipelineRunsOperations( self._client, self._config, self._serialize, self._deserialize) - self.activity_run = ActivityRunOperations( + self.activity_runs = ActivityRunsOperations( self._client, self._config, self._serialize, 
self._deserialize) - self.trigger = TriggerOperations( + self.triggers = TriggersOperations( self._client, self._config, self._serialize, self._deserialize) - self.trigger_run = TriggerRunOperations( + self.trigger_runs = TriggerRunsOperations( self._client, self._config, self._serialize, self._deserialize) - self.data_flow = DataFlowOperations( + self.data_flows = DataFlowsOperations( self._client, self._config, self._serialize, self._deserialize) self.data_flow_debug_session = DataFlowDebugSessionOperations( self._client, self._config, self._serialize, self._deserialize) - self.managed_virtual_network = ManagedVirtualNetworkOperations( + self.managed_virtual_networks = ManagedVirtualNetworksOperations( self._client, self._config, self._serialize, self._deserialize) - self.managed_private_endpoint = ManagedPrivateEndpointOperations( + self.managed_private_endpoints = ManagedPrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize) def close(self): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py index 5cde5bc8d05..571673cab5c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/__init__.py @@ -6,5 +6,5 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._data_factory_management_client_async import DataFactoryManagementClient +from ._data_factory_management_client import DataFactoryManagementClient __all__ = ['DataFactoryManagementClient'] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py similarity index 95% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py index 411d6c4a66e..c88a091bdb9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_configuration.py @@ -45,8 +45,7 @@ def __init__( self.credential = credential self.subscription_id = subscription_id self.api_version = "2018-06-01" - self.credential_scopes = ['https://management.azure.com/.default'] - self.credential_scopes.extend(kwargs.pop('credential_scopes', [])) + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'datafactorymanagementclient/{}'.format(VERSION)) self._configure(**kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py similarity index 50% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py index b2b322686b8..43ced0179e9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client_async.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_data_factory_management_client.py @@ -15,64 +15,64 @@ # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration_async import DataFactoryManagementClientConfiguration -from .operations_async import OperationOperations -from .operations_async import FactoryOperations -from .operations_async import ExposureControlOperations -from .operations_async import IntegrationRuntimeOperations -from .operations_async import IntegrationRuntimeObjectMetadataOperations -from .operations_async import IntegrationRuntimeNodeOperations -from .operations_async import LinkedServiceOperations -from .operations_async import DatasetOperations -from .operations_async import PipelineOperations -from .operations_async import PipelineRunOperations -from .operations_async import ActivityRunOperations -from .operations_async import TriggerOperations -from .operations_async import TriggerRunOperations -from .operations_async import DataFlowOperations -from .operations_async import DataFlowDebugSessionOperations -from .operations_async import ManagedVirtualNetworkOperations -from .operations_async import ManagedPrivateEndpointOperations +from ._configuration import DataFactoryManagementClientConfiguration +from .operations import Operations +from .operations import FactoriesOperations +from .operations import ExposureControlOperations +from .operations import IntegrationRuntimesOperations +from .operations import IntegrationRuntimeObjectMetadataOperations +from .operations import IntegrationRuntimeNodesOperations +from .operations import LinkedServicesOperations +from .operations import DatasetsOperations +from .operations import PipelinesOperations +from .operations import PipelineRunsOperations +from .operations import ActivityRunsOperations +from .operations import TriggersOperations +from .operations import TriggerRunsOperations +from .operations import DataFlowsOperations +from .operations import DataFlowDebugSessionOperations +from .operations import ManagedVirtualNetworksOperations +from .operations import ManagedPrivateEndpointsOperations from .. import models class DataFactoryManagementClient(object): """The Azure Data Factory V2 management API provides a RESTful set of web services that interact with Azure Data Factory V2 services. 
- :ivar operation: OperationOperations operations - :vartype operation: data_factory_management_client.aio.operations_async.OperationOperations - :ivar factory: FactoryOperations operations - :vartype factory: data_factory_management_client.aio.operations_async.FactoryOperations + :ivar operations: Operations operations + :vartype operations: data_factory_management_client.aio.operations.Operations + :ivar factories: FactoriesOperations operations + :vartype factories: data_factory_management_client.aio.operations.FactoriesOperations :ivar exposure_control: ExposureControlOperations operations - :vartype exposure_control: data_factory_management_client.aio.operations_async.ExposureControlOperations - :ivar integration_runtime: IntegrationRuntimeOperations operations - :vartype integration_runtime: data_factory_management_client.aio.operations_async.IntegrationRuntimeOperations + :vartype exposure_control: data_factory_management_client.aio.operations.ExposureControlOperations + :ivar integration_runtimes: IntegrationRuntimesOperations operations + :vartype integration_runtimes: data_factory_management_client.aio.operations.IntegrationRuntimesOperations :ivar integration_runtime_object_metadata: IntegrationRuntimeObjectMetadataOperations operations - :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations_async.IntegrationRuntimeObjectMetadataOperations - :ivar integration_runtime_node: IntegrationRuntimeNodeOperations operations - :vartype integration_runtime_node: data_factory_management_client.aio.operations_async.IntegrationRuntimeNodeOperations - :ivar linked_service: LinkedServiceOperations operations - :vartype linked_service: data_factory_management_client.aio.operations_async.LinkedServiceOperations - :ivar dataset: DatasetOperations operations - :vartype dataset: data_factory_management_client.aio.operations_async.DatasetOperations - :ivar pipeline: PipelineOperations operations - :vartype pipeline: data_factory_management_client.aio.operations_async.PipelineOperations - :ivar pipeline_run: PipelineRunOperations operations - :vartype pipeline_run: data_factory_management_client.aio.operations_async.PipelineRunOperations - :ivar activity_run: ActivityRunOperations operations - :vartype activity_run: data_factory_management_client.aio.operations_async.ActivityRunOperations - :ivar trigger: TriggerOperations operations - :vartype trigger: data_factory_management_client.aio.operations_async.TriggerOperations - :ivar trigger_run: TriggerRunOperations operations - :vartype trigger_run: data_factory_management_client.aio.operations_async.TriggerRunOperations - :ivar data_flow: DataFlowOperations operations - :vartype data_flow: data_factory_management_client.aio.operations_async.DataFlowOperations + :vartype integration_runtime_object_metadata: data_factory_management_client.aio.operations.IntegrationRuntimeObjectMetadataOperations + :ivar integration_runtime_nodes: IntegrationRuntimeNodesOperations operations + :vartype integration_runtime_nodes: data_factory_management_client.aio.operations.IntegrationRuntimeNodesOperations + :ivar linked_services: LinkedServicesOperations operations + :vartype linked_services: data_factory_management_client.aio.operations.LinkedServicesOperations + :ivar datasets: DatasetsOperations operations + :vartype datasets: data_factory_management_client.aio.operations.DatasetsOperations + :ivar pipelines: PipelinesOperations operations + :vartype pipelines: data_factory_management_client.aio.operations.PipelinesOperations + 
:ivar pipeline_runs: PipelineRunsOperations operations + :vartype pipeline_runs: data_factory_management_client.aio.operations.PipelineRunsOperations + :ivar activity_runs: ActivityRunsOperations operations + :vartype activity_runs: data_factory_management_client.aio.operations.ActivityRunsOperations + :ivar triggers: TriggersOperations operations + :vartype triggers: data_factory_management_client.aio.operations.TriggersOperations + :ivar trigger_runs: TriggerRunsOperations operations + :vartype trigger_runs: data_factory_management_client.aio.operations.TriggerRunsOperations + :ivar data_flows: DataFlowsOperations operations + :vartype data_flows: data_factory_management_client.aio.operations.DataFlowsOperations :ivar data_flow_debug_session: DataFlowDebugSessionOperations operations - :vartype data_flow_debug_session: data_factory_management_client.aio.operations_async.DataFlowDebugSessionOperations - :ivar managed_virtual_network: ManagedVirtualNetworkOperations operations - :vartype managed_virtual_network: data_factory_management_client.aio.operations_async.ManagedVirtualNetworkOperations - :ivar managed_private_endpoint: ManagedPrivateEndpointOperations operations - :vartype managed_private_endpoint: data_factory_management_client.aio.operations_async.ManagedPrivateEndpointOperations + :vartype data_flow_debug_session: data_factory_management_client.aio.operations.DataFlowDebugSessionOperations + :ivar managed_virtual_networks: ManagedVirtualNetworksOperations operations + :vartype managed_virtual_networks: data_factory_management_client.aio.operations.ManagedVirtualNetworksOperations + :ivar managed_private_endpoints: ManagedPrivateEndpointsOperations operations + :vartype managed_private_endpoints: data_factory_management_client.aio.operations.ManagedPrivateEndpointsOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription identifier. 
@@ -97,39 +97,39 @@ def __init__( self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) - self.operation = OperationOperations( + self.operations = Operations( self._client, self._config, self._serialize, self._deserialize) - self.factory = FactoryOperations( + self.factories = FactoriesOperations( self._client, self._config, self._serialize, self._deserialize) self.exposure_control = ExposureControlOperations( self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime = IntegrationRuntimeOperations( + self.integration_runtimes = IntegrationRuntimesOperations( self._client, self._config, self._serialize, self._deserialize) self.integration_runtime_object_metadata = IntegrationRuntimeObjectMetadataOperations( self._client, self._config, self._serialize, self._deserialize) - self.integration_runtime_node = IntegrationRuntimeNodeOperations( + self.integration_runtime_nodes = IntegrationRuntimeNodesOperations( self._client, self._config, self._serialize, self._deserialize) - self.linked_service = LinkedServiceOperations( + self.linked_services = LinkedServicesOperations( self._client, self._config, self._serialize, self._deserialize) - self.dataset = DatasetOperations( + self.datasets = DatasetsOperations( self._client, self._config, self._serialize, self._deserialize) - self.pipeline = PipelineOperations( + self.pipelines = PipelinesOperations( self._client, self._config, self._serialize, self._deserialize) - self.pipeline_run = PipelineRunOperations( + self.pipeline_runs = PipelineRunsOperations( self._client, self._config, self._serialize, self._deserialize) - self.activity_run = ActivityRunOperations( + self.activity_runs = ActivityRunsOperations( self._client, self._config, self._serialize, self._deserialize) - self.trigger = TriggerOperations( + self.triggers = TriggersOperations( self._client, self._config, self._serialize, self._deserialize) - self.trigger_run = TriggerRunOperations( + self.trigger_runs = TriggerRunsOperations( self._client, self._config, self._serialize, self._deserialize) - self.data_flow = DataFlowOperations( + self.data_flows = DataFlowsOperations( self._client, self._config, self._serialize, self._deserialize) self.data_flow_debug_session = DataFlowDebugSessionOperations( self._client, self._config, self._serialize, self._deserialize) - self.managed_virtual_network = ManagedVirtualNetworkOperations( + self.managed_virtual_networks = ManagedVirtualNetworksOperations( self._client, self._config, self._serialize, self._deserialize) - self.managed_private_endpoint = ManagedPrivateEndpointOperations( + self.managed_private_endpoints = ManagedPrivateEndpointsOperations( self._client, self._config, self._serialize, self._deserialize) async def close(self) -> None: diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py new file mode 100644 index 00000000000..95d26857909 --- /dev/null +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/__init__.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._operations import Operations +from ._factories_operations import FactoriesOperations +from ._exposure_control_operations import ExposureControlOperations +from ._integration_runtimes_operations import IntegrationRuntimesOperations +from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations +from ._integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations +from ._linked_services_operations import LinkedServicesOperations +from ._datasets_operations import DatasetsOperations +from ._pipelines_operations import PipelinesOperations +from ._pipeline_runs_operations import PipelineRunsOperations +from ._activity_runs_operations import ActivityRunsOperations +from ._triggers_operations import TriggersOperations +from ._trigger_runs_operations import TriggerRunsOperations +from ._data_flows_operations import DataFlowsOperations +from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations +from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations +from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations + +__all__ = [ + 'Operations', + 'FactoriesOperations', + 'ExposureControlOperations', + 'IntegrationRuntimesOperations', + 'IntegrationRuntimeObjectMetadataOperations', + 'IntegrationRuntimeNodesOperations', + 'LinkedServicesOperations', + 'DatasetsOperations', + 'PipelinesOperations', + 'PipelineRunsOperations', + 'ActivityRunsOperations', + 'TriggersOperations', + 'TriggerRunsOperations', + 'DataFlowsOperations', + 'DataFlowDebugSessionOperations', + 'ManagedVirtualNetworksOperations', + 'ManagedPrivateEndpointsOperations', +] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py similarity index 73% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py index 0d2e56be08b..39382a45d74 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_activity_run_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_activity_runs_operations.py @@ -5,11 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
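# --- Illustrative usage sketch (not part of the generated code) ---
# After this rename the client exposes pluralized operation groups backed by the
# classes listed in __all__ above, e.g. `client.factories` and `client.pipeline_runs`
# instead of the former `client.factory` / `client.pipeline_run`. A minimal sketch,
# assuming azure-identity is available and "<subscription-id>" is a placeholder:
from azure.identity import DefaultAzureCredential
from azext_datafactory.vendored_sdks.datafactory import DataFactoryManagementClient

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
for factory in client.factories.list():  # previously client.factory.list()
    print(factory.name)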
# -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar +from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +18,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ActivityRunOperations: - """ActivityRunOperations async operations. +class ActivityRunsOperations: + """ActivityRunsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -46,11 +45,7 @@ async def query_by_pipeline_run( resource_group_name: str, factory_name: str, run_id: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, + filter_parameters: "models.RunFilterParameters", **kwargs ) -> "models.ActivityRunsQueryResponse": """Query activity runs based on input filter conditions. @@ -61,31 +56,21 @@ async def query_by_pipeline_run( :type factory_name: str :param run_id: The pipeline run identifier. :type run_id: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] + :param filter_parameters: Parameters to filter the activity runs. 
+ :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: ActivityRunsQueryResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_pipeline_run.metadata['url'] # type: ignore @@ -104,13 +89,12 @@ async def query_by_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py similarity index 80% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py index f1bf8ee8f73..dbb85249ab9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_debug_session_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flow_debug_session_operations.py @@ -5,11 +5,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
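# --- Illustrative usage sketch (not part of the generated code) ---
# query_by_pipeline_run no longer flattens the filter into last_updated_after /
# last_updated_before / filters / order_by keyword arguments; callers now build a
# RunFilterParameters model themselves. A minimal async sketch, assuming an
# already-constructed aio DataFactoryManagementClient; the resource names are
# placeholders and the run id is reused from the scenario test above.
import datetime
from azext_datafactory.vendored_sdks.datafactory import models

async def query_activity_runs_sketch(client):
    filter_parameters = models.RunFilterParameters(
        last_updated_after=datetime.datetime(2018, 6, 16, 0, 36, 44),
        last_updated_before=datetime.datetime(2018, 6, 16, 0, 49, 48))
    response = await client.activity_runs.query_by_pipeline_run(
        "<resource-group>", "<factory-name>",
        "16ac5348-ff82-4f95-a80d-638c1d47b721",
        filter_parameters)
    return [run.activity_name for run in response.value]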
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -47,20 +47,17 @@ async def _create_initial( self, resource_group_name: str, factory_name: str, - compute_type: Optional[str] = None, - core_count: Optional[int] = None, - time_to_live: Optional[int] = None, - name: Optional[str] = None, - properties: Optional["models.IntegrationRuntime"] = None, + request: "models.CreateDataFlowDebugSessionRequest", **kwargs ) -> Optional["models.CreateDataFlowDebugSessionResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._create_initial.metadata['url'] # type: ignore @@ -78,13 +75,12 @@ async def _create_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -110,11 +106,7 @@ async def begin_create( self, resource_group_name: str, factory_name: str, - compute_type: Optional[str] = None, - core_count: Optional[int] = None, - time_to_live: Optional[int] = None, - name: Optional[str] = None, - properties: Optional["models.IntegrationRuntime"] = None, + request: "models.CreateDataFlowDebugSessionRequest", **kwargs ) -> AsyncLROPoller["models.CreateDataFlowDebugSessionResponse"]: """Creates a data flow debug session. @@ -123,18 +115,8 @@ async def begin_create( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param compute_type: Compute type of the cluster. The value will be overwritten by the same - setting in integration runtime if provided. - :type compute_type: str - :param core_count: Core count of the cluster. The value will be overwritten by the same setting - in integration runtime if provided. 
- :type core_count: int - :param time_to_live: Time to live setting of the cluster in minutes. - :type time_to_live: int - :param name: The resource name. - :type name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :param request: Data flow debug session definition. + :type request: ~data_factory_management_client.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -156,11 +138,7 @@ async def begin_create( raw_result = await self._create_initial( resource_group_name=resource_group_name, factory_name=factory_name, - compute_type=compute_type, - core_count=core_count, - time_to_live=time_to_live, - name=name, - properties=properties, + request=request, cls=lambda x,y,z: x, **kwargs ) @@ -175,7 +153,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -207,14 +191,17 @@ def query_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -264,16 +251,7 @@ async def add_data_flow( self, resource_group_name: str, factory_name: str, - session_id: Optional[str] = None, - datasets: Optional[List["models.DatasetDebugResource"]] = None, - linked_services: Optional[List["models.LinkedServiceDebugResource"]] = None, - source_settings: Optional[List["models.DataFlowSourceSetting"]] = None, - parameters: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, - folder_path: Optional[object] = None, - reference_name: Optional[str] = None, - name: Optional[str] = None, - properties: Optional["models.DataFlow"] = None, + request: "models.DataFlowDebugPackage", **kwargs ) -> "models.AddDataFlowToDebugSessionResponse": """Add a data flow into debug session. @@ -282,39 +260,21 @@ async def add_data_flow( :type resource_group_name: str :param factory_name: The factory name. 
:type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param datasets: List of datasets. - :type datasets: list[~data_factory_management_client.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). - :type folder_path: object - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param name: The resource name. - :type name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :param request: Data flow debug session definition with debug content. + :type request: ~data_factory_management_client.models.DataFlowDebugPackage :keyword callable cls: A custom type or function that will be passed the direct response :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.add_data_flow.metadata['url'] # type: ignore @@ -332,13 +292,12 @@ async def add_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -358,7 +317,7 @@ async def delete( self, resource_group_name: str, factory_name: str, - session_id: Optional[str] = None, + request: "models.DeleteDataFlowDebugSessionRequest", **kwargs ) -> None: """Deletes a data flow debug session. @@ -367,20 +326,21 @@ async def delete( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param session_id: The ID of data flow debug session. 
- :type session_id: str + :param request: Data flow debug session definition for deletion. + :type request: ~data_factory_management_client.models.DeleteDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -398,12 +358,12 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -420,18 +380,17 @@ async def _execute_command_initial( self, resource_group_name: str, factory_name: str, - session_id: Optional[str] = None, - command: Optional[Union[str, "models.DataFlowDebugCommandType"]] = None, - command_payload: Optional["models.DataFlowDebugCommandPayload"] = None, + request: "models.DataFlowDebugCommandRequest", **kwargs ) -> Optional["models.DataFlowDebugCommandResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._execute_command_initial.metadata['url'] # type: ignore @@ -449,13 +408,12 @@ async def _execute_command_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -481,9 +439,7 @@ async def begin_execute_command( self, resource_group_name: str, factory_name: str, - session_id: Optional[str] = None, - command: 
Optional[Union[str, "models.DataFlowDebugCommandType"]] = None, - command_payload: Optional["models.DataFlowDebugCommandPayload"] = None, + request: "models.DataFlowDebugCommandRequest", **kwargs ) -> AsyncLROPoller["models.DataFlowDebugCommandResponse"]: """Execute a data flow debug command. @@ -492,12 +448,8 @@ async def begin_execute_command( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param command: The command type. - :type command: str or ~data_factory_management_client.models.DataFlowDebugCommandType - :param command_payload: The command payload object. - :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload + :param request: Data flow debug command definition. + :type request: ~data_factory_management_client.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -519,9 +471,7 @@ async def begin_execute_command( raw_result = await self._execute_command_initial( resource_group_name=resource_group_name, factory_name=factory_name, - session_id=session_id, - command=command, - command_payload=command_payload, + request=request, cls=lambda x,y,z: x, **kwargs ) @@ -536,7 +486,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py index b5c2e5656ce..20d1ec288ce 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_data_flow_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_data_flows_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import 
AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +19,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class DataFlowOperations: - """DataFlowOperations async operations. +class DataFlowsOperations: + """DataFlowsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -46,7 +46,7 @@ async def create_or_update( resource_group_name: str, factory_name: str, data_flow_name: str, - properties: "models.DataFlow", + data_flow: "models.DataFlowResource", if_match: Optional[str] = None, **kwargs ) -> "models.DataFlowResource": @@ -58,8 +58,8 @@ async def create_or_update( :type factory_name: str :param data_flow_name: The data flow name. :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :param data_flow: Data flow resource definition. + :type data_flow: ~data_factory_management_client.models.DataFlowResource :param if_match: ETag of the data flow entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -69,12 +69,13 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - data_flow = models.DataFlowResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -95,13 +96,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(data_flow, 'DataFlowResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -142,9 +142,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -164,7 +167,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] 
= self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -203,9 +206,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -223,6 +229,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -255,14 +262,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py index a8be0369365..23cd39c246d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_dataset_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_datasets_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +19,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class DatasetOperations: - """DatasetOperations async operations. +class DatasetsOperations: + """DatasetsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. 
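Reviewer note (illustrative only, not part of the generated patch): the hunks above un-flatten the operation signatures, so callers now build the request/resource model themselves instead of passing loose keyword arguments that the SDK used to wrap internally. A minimal sketch of the new calling convention for the renamed async data flows operations group, assuming an already-constructed async DataFactoryManagementClient named `client`, a `models.DataFlow` instance named `data_flow_properties`, and that the operations group is exposed as `client.data_flows` (attribute name assumed from the pluralized class name):

# Sketch only; identifiers other than the model/parameter names shown in the diff are placeholders.
from data_factory_management_client import models  # namespace as referenced in the docstrings; the vendored import path may differ

async def upsert_data_flow(client, resource_group, factory, name, data_flow_properties):
    # Previously the operation wrapped the properties itself:
    #   data_flow = models.DataFlowResource(properties=properties)
    # With this change the caller passes the wrapped resource explicitly.
    resource = models.DataFlowResource(properties=data_flow_properties)
    return await client.data_flows.create_or_update(
        resource_group_name=resource_group,
        factory_name=factory,
        data_flow_name=name,
        data_flow=resource,
    )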
@@ -59,14 +59,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -117,7 +120,7 @@ async def create_or_update( resource_group_name: str, factory_name: str, dataset_name: str, - properties: "models.Dataset", + dataset: "models.DatasetResource", if_match: Optional[str] = None, **kwargs ) -> "models.DatasetResource": @@ -129,8 +132,8 @@ async def create_or_update( :type factory_name: str :param dataset_name: The dataset name. :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~data_factory_management_client.models.Dataset + :param dataset: Dataset resource definition. + :type dataset: ~data_factory_management_client.models.DatasetResource :param if_match: ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -140,12 +143,13 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - dataset = models.DatasetResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -166,13 +170,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(dataset, 'DatasetResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -213,9 +216,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -235,7 +241,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not 
None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -276,9 +282,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -296,6 +305,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py similarity index 83% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py index b20acb1c3c8..df180e52804 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_exposure_control_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_exposure_control_operations.py @@ -5,10 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar +from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -43,30 +43,28 @@ def __init__(self, client, config, serializer, deserializer) -> None: async def get_feature_value( self, location_id: str, - feature_name: Optional[str] = None, - feature_type: Optional[str] = None, + exposure_control_request: "models.ExposureControlRequest", **kwargs ) -> "models.ExposureControlResponse": """Get exposure control feature for specific location. :param location_id: The location identifier. :type location_id: str - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str + :param exposure_control_request: The exposure control request. 
+ :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ExposureControlResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_feature_value.metadata['url'] # type: ignore @@ -83,13 +81,12 @@ async def get_feature_value( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -109,8 +106,7 @@ async def get_feature_value_by_factory( self, resource_group_name: str, factory_name: str, - feature_name: Optional[str] = None, - feature_type: Optional[str] = None, + exposure_control_request: "models.ExposureControlRequest", **kwargs ) -> "models.ExposureControlResponse": """Get exposure control feature for specific factory. @@ -119,22 +115,21 @@ async def get_feature_value_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str + :param exposure_control_request: The exposure control request. 
+ :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ExposureControlResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_feature_value_by_factory.metadata['url'] # type: ignore @@ -152,13 +147,12 @@ async def get_feature_value_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -174,11 +168,11 @@ async def get_feature_value_by_factory( return deserialized get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore - async def query_feature_value_by_factory( + async def query_feature_values_by_factory( self, resource_group_name: str, factory_name: str, - exposure_control_requests: List["models.ExposureControlRequest"], + exposure_control_batch_request: "models.ExposureControlBatchRequest", **kwargs ) -> "models.ExposureControlBatchResponse": """Get list of exposure control features for specific factory. @@ -187,23 +181,24 @@ async def query_feature_value_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param exposure_control_requests: List of exposure control features. - :type exposure_control_requests: list[~data_factory_management_client.models.ExposureControlRequest] + :param exposure_control_batch_request: The exposure control request for list of features. 
+ :type exposure_control_batch_request: ~data_factory_management_client.models.ExposureControlBatchRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlBatchResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.query_feature_value_by_factory.metadata['url'] # type: ignore + url = self.query_feature_values_by_factory.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -218,13 +213,12 @@ async def query_feature_value_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -238,4 +232,4 @@ async def query_feature_value_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized - query_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore + query_feature_values_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py similarity index 83% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py index 46f37c1a6f7..f8b64723a03 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_factory_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_factories_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, 
AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +19,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class FactoryOperations: - """FactoryOperations async operations. +class FactoriesOperations: + """FactoriesOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -53,14 +53,17 @@ def list( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -107,30 +110,28 @@ async def get_next(next_link=None): async def configure_factory_repo( self, location_id: str, - factory_resource_id: Optional[str] = None, - repo_configuration: Optional["models.FactoryRepoConfiguration"] = None, + factory_repo_update: "models.FactoryRepoUpdate", **kwargs ) -> "models.Factory": """Updates a factory's repo information. :param location_id: The location identifier. :type location_id: str - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration + :param factory_repo_update: Update factory repo request definition. 
+ :type factory_repo_update: ~data_factory_management_client.models.FactoryRepoUpdate :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) :rtype: ~data_factory_management_client.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.configure_factory_repo.metadata['url'] # type: ignore @@ -147,13 +148,12 @@ async def configure_factory_repo( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -184,14 +184,17 @@ def list_by_resource_group( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -240,12 +243,8 @@ async def create_or_update( self, resource_group_name: str, factory_name: str, + factory: "models.Factory", if_match: Optional[str] = None, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, - identity: Optional["models.FactoryIdentity"] = None, - repo_configuration: Optional["models.FactoryRepoConfiguration"] = None, - global_parameters: Optional[Dict[str, "models.GlobalParameterSpecification"]] = None, **kwargs ) -> "models.Factory": """Creates or updates a factory. @@ -254,31 +253,24 @@ async def create_or_update( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str + :param factory: Factory resource definition. + :type factory: ~data_factory_management_client.models.Factory :param if_match: ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. 
- :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) :rtype: ~data_factory_management_client.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -298,13 +290,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory, 'Factory') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -324,8 +315,7 @@ async def update( self, resource_group_name: str, factory_name: str, - tags: Optional[Dict[str, str]] = None, - identity: Optional["models.FactoryIdentity"] = None, + factory_update_parameters: "models.FactoryUpdateParameters", **kwargs ) -> "models.Factory": """Updates a factory. @@ -334,22 +324,21 @@ async def update( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity + :param factory_update_parameters: The parameters for updating a factory. 
+ :type factory_update_parameters: ~data_factory_management_client.models.FactoryUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) :rtype: ~data_factory_management_client.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -367,13 +356,12 @@ async def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -411,9 +399,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -432,7 +423,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -470,9 +461,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -489,6 +483,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -507,9 +502,7 @@ async def get_git_hub_access_token( self, 
resource_group_name: str, factory_name: str, - git_hub_access_code: str, - git_hub_access_token_base_url: str, - git_hub_client_id: Optional[str] = None, + git_hub_access_token_request: "models.GitHubAccessTokenRequest", **kwargs ) -> "models.GitHubAccessTokenResponse": """Get GitHub Access Token. @@ -518,24 +511,21 @@ async def get_git_hub_access_token( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param git_hub_access_code: GitHub access code. - :type git_hub_access_code: str - :param git_hub_access_token_base_url: GitHub access token base URL. - :type git_hub_access_token_base_url: str - :param git_hub_client_id: GitHub application client ID. - :type git_hub_client_id: str + :param git_hub_access_token_request: Get GitHub access token request definition. + :type git_hub_access_token_request: ~data_factory_management_client.models.GitHubAccessTokenRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: GitHubAccessTokenResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_git_hub_access_token.metadata['url'] # type: ignore @@ -553,13 +543,12 @@ async def get_git_hub_access_token( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -579,11 +568,7 @@ async def get_data_plane_access( self, resource_group_name: str, factory_name: str, - permissions: Optional[str] = None, - access_resource_path: Optional[str] = None, - profile_name: Optional[str] = None, - start_time: Optional[str] = None, - expire_time: Optional[str] = None, + policy: "models.UserAccessPolicy", **kwargs ) -> "models.AccessPolicyResponse": """Get Data Plane access. @@ -592,32 +577,21 @@ async def get_data_plane_access( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is - supported which grants read only access. - :type permissions: str - :param access_resource_path: The resource path to get access relative to factory. 
Currently - only empty string is supported which corresponds to the factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default is supported. The - default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for the token is eight - hours and by default the token will expire in eight hours. - :type expire_time: str + :param policy: Data Plane user access policy definition. + :type policy: ~data_factory_management_client.models.UserAccessPolicy :keyword callable cls: A custom type or function that will be passed the direct response :return: AccessPolicyResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.AccessPolicyResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_data_plane_access.metadata['url'] # type: ignore @@ -635,13 +609,12 @@ async def get_data_plane_access( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(policy, 'UserAccessPolicy') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py similarity index 90% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py index a6022196653..098d00bbb3e 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_node_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_nodes_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, 
map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -18,8 +18,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class IntegrationRuntimeNodeOperations: - """IntegrationRuntimeNodeOperations async operations. +class IntegrationRuntimeNodesOperations: + """IntegrationRuntimeNodesOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -64,9 +64,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -85,7 +88,7 @@ async def get( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -127,9 +130,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -148,6 +154,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -168,7 +175,7 @@ async def update( factory_name: str, integration_runtime_name: str, node_name: str, - concurrent_jobs_limit: Optional[int] = None, + update_integration_runtime_node_request: "models.UpdateIntegrationRuntimeNodeRequest", **kwargs ) -> "models.SelfHostedIntegrationRuntimeNode": """Updates a self-hosted integration runtime node. @@ -181,21 +188,22 @@ async def update( :type integration_runtime_name: str :param node_name: The integration runtime node name. :type node_name: str - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration - runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. - :type concurrent_jobs_limit: int + :param update_integration_runtime_node_request: The parameters for updating an integration + runtime node. 
+ :type update_integration_runtime_node_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeNodeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -215,13 +223,12 @@ async def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -261,9 +268,12 @@ async def get_ip_address( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_ip_address.metadata['url'] # type: ignore @@ -282,7 +292,7 @@ async def get_ip_address( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py similarity index 87% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py index 70df0716c21..a1825a0d1bb 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_object_metadata_operations_async.py +++ 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtime_object_metadata_operations.py @@ -8,7 +8,7 @@ from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -50,9 +50,12 @@ async def _refresh_initial( **kwargs ) -> Optional["models.SsisObjectMetadataStatusResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._refresh_initial.metadata['url'] # type: ignore @@ -70,7 +73,7 @@ async def _refresh_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -141,7 +144,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -160,7 +170,7 @@ async def get( resource_group_name: str, factory_name: str, integration_runtime_name: str, - metadata_path: Optional[str] = None, + get_metadata_request: Optional["models.GetSsisObjectMetadataRequest"] = None, **kwargs ) -> "models.SsisObjectMetadataListResponse": """Get a SSIS integration runtime object metadata by specified path. The return is pageable @@ -172,20 +182,21 @@ async def get( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param metadata_path: Metadata path. - :type metadata_path: str + :param get_metadata_request: The parameters for getting a SSIS object metadata. 
+ :type get_metadata_request: ~data_factory_management_client.models.GetSsisObjectMetadataRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SsisObjectMetadataListResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -204,7 +215,7 @@ async def get( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if get_metadata_request is not None: @@ -213,7 +224,6 @@ async def get( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py similarity index 87% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py index 82b285c7a74..6b27efc1819 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_integration_runtimes_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -21,8 +21,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class IntegrationRuntimeOperations: - """IntegrationRuntimeOperations async operations. +class IntegrationRuntimesOperations: + """IntegrationRuntimesOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. 
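Reviewer note (illustrative only, not part of the generated patch): the integration runtime node and SSIS metadata hunks above follow the same un-flattening pattern, e.g. `concurrent_jobs_limit` is now carried inside an UpdateIntegrationRuntimeNodeRequest built by the caller. A hedged usage sketch, reusing the placeholder `client` and `models` names from the previous note and assuming the operations group is exposed as `client.integration_runtime_nodes`:

# Sketch only; the resource names below are placeholders.
request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=2)
node = await client.integration_runtime_nodes.update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleIntegrationRuntime",
    node_name="Node_1",
    update_integration_runtime_node_request=request,
)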
@@ -61,14 +61,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -119,7 +122,7 @@ async def create_or_update( resource_group_name: str, factory_name: str, integration_runtime_name: str, - properties: "models.IntegrationRuntime", + integration_runtime: "models.IntegrationRuntimeResource", if_match: Optional[str] = None, **kwargs ) -> "models.IntegrationRuntimeResource": @@ -131,8 +134,8 @@ async def create_or_update( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :param integration_runtime: Integration runtime resource definition. + :type integration_runtime: ~data_factory_management_client.models.IntegrationRuntimeResource :param if_match: ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -142,12 +145,13 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - integration_runtime = models.IntegrationRuntimeResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -168,13 +172,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -216,9 +219,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } 
error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -238,7 +244,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -263,8 +269,7 @@ async def update( resource_group_name: str, factory_name: str, integration_runtime_name: str, - auto_update: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] = None, - update_delay_offset: Optional[str] = None, + update_integration_runtime_request: "models.UpdateIntegrationRuntimeRequest", **kwargs ) -> "models.IntegrationRuntimeResource": """Updates an integration runtime. @@ -275,24 +280,21 @@ async def update( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param auto_update: Enables or disables the auto-update feature of the self-hosted integration - runtime. See https://go.microsoft.com/fwlink/?linkid=854189. - :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The - integration runtime auto update will happen on that time. - :type update_delay_offset: str + :param update_integration_runtime_request: The parameters for updating an integration runtime. + :type update_integration_runtime_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeResource, or the result of cls(response) :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -311,13 +313,12 @@ async def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = 
pipeline_response.http_response @@ -354,9 +355,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -374,6 +378,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -409,9 +414,12 @@ async def get_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_status.metadata['url'] # type: ignore @@ -429,7 +437,7 @@ async def get_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -469,9 +477,12 @@ async def get_connection_info( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_connection_info.metadata['url'] # type: ignore @@ -489,7 +500,7 @@ async def get_connection_info( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -512,7 +523,7 @@ async def regenerate_auth_key( resource_group_name: str, factory_name: str, integration_runtime_name: str, - key_name: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] = None, + regenerate_key_parameters: "models.IntegrationRuntimeRegenerateKeyParameters", **kwargs ) -> "models.IntegrationRuntimeAuthKeys": """Regenerates the authentication key for an integration runtime. @@ -523,20 +534,22 @@ async def regenerate_auth_key( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param key_name: The name of the authentication key to regenerate. 
- :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName + :param regenerate_key_parameters: The parameters for regenerating integration runtime + authentication key. + :type regenerate_key_parameters: ~data_factory_management_client.models.IntegrationRuntimeRegenerateKeyParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeAuthKeys, or the result of cls(response) :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.regenerate_auth_key.metadata['url'] # type: ignore @@ -555,13 +568,12 @@ async def regenerate_auth_key( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -577,7 +589,7 @@ async def regenerate_auth_key( return deserialized regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore - async def list_auth_key( + async def list_auth_keys( self, resource_group_name: str, factory_name: str, @@ -598,12 +610,15 @@ async def list_auth_key( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL - url = self.list_auth_key.metadata['url'] # type: ignore + url = self.list_auth_keys.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -618,7 +633,7 @@ async def list_auth_key( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, 
query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -634,7 +649,7 @@ async def list_auth_key( return cls(pipeline_response, deserialized, {}) return deserialized - list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore + list_auth_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore async def _start_initial( self, @@ -644,9 +659,12 @@ async def _start_initial( **kwargs ) -> Optional["models.IntegrationRuntimeStatusResponse"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._start_initial.metadata['url'] # type: ignore @@ -664,7 +682,7 @@ async def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -735,7 +753,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -757,9 +782,12 @@ async def _stop_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore @@ -777,6 +805,7 @@ async def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await 
self._client._pipeline.run(request, stream=False, **kwargs) @@ -839,7 +868,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -877,9 +913,12 @@ async def sync_credentials( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.sync_credentials.metadata['url'] # type: ignore @@ -897,6 +936,7 @@ async def sync_credentials( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -933,9 +973,12 @@ async def get_monitoring_data( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_monitoring_data.metadata['url'] # type: ignore @@ -953,7 +996,7 @@ async def get_monitoring_data( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -992,9 +1035,12 @@ async def upgrade( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.upgrade.metadata['url'] # type: ignore @@ -1012,6 +1058,7 @@ async def upgrade( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -1026,12 +1073,12 @@ async def upgrade( upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore - async def remove_link( + async def remove_links( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, - linked_factory_name: str, + linked_integration_runtime_request: "models.LinkedIntegrationRuntimeRequest", **kwargs ) -> None: """Remove all linked integration runtimes under specific data factory in a self-hosted integration @@ -1043,23 +1090,25 @@ async def remove_link( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param linked_factory_name: The data factory name for linked integration runtime. - :type linked_factory_name: str + :param linked_integration_runtime_request: The data factory name for the linked integration + runtime. + :type linked_integration_runtime_request: ~data_factory_management_client.models.LinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.remove_link.metadata['url'] # type: ignore + url = self.remove_links.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -1075,12 +1124,12 @@ async def remove_link( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1091,17 +1140,14 @@ async def remove_link( if cls: return cls(pipeline_response, None, {}) - remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore + remove_links.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore async def create_linked_integration_runtime( self, resource_group_name: str, factory_name: str, integration_runtime_name: str, - name: Optional[str] = None, - subscription_id: Optional[str] = None, - data_factory_name: Optional[str] = None, - data_factory_location: Optional[str] = None, + create_linked_integration_runtime_request: "models.CreateLinkedIntegrationRuntimeRequest", **kwargs ) -> "models.IntegrationRuntimeStatusResponse": """Create a linked integration runtime entry in a shared integration runtime. @@ -1112,29 +1158,21 @@ async def create_linked_integration_runtime( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param name: The name of the linked integration runtime. - :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs - to. - :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime - belongs to. - :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration - runtime belongs to. - :type data_factory_location: str + :param create_linked_integration_runtime_request: The linked integration runtime properties. + :type create_linked_integration_runtime_request: ~data_factory_management_client.models.CreateLinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeStatusResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_linked_integration_runtime.metadata['url'] # type: ignore @@ -1153,13 +1191,12 @@ async def create_linked_integration_runtime( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py index 56e9e6f663a..e6444acf5f7 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_linked_service_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_linked_services_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +19,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class LinkedServiceOperations: - """LinkedServiceOperations async operations. +class LinkedServicesOperations: + """LinkedServicesOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -59,14 +59,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -117,7 +120,7 @@ async def create_or_update( resource_group_name: str, factory_name: str, linked_service_name: str, - properties: "models.LinkedService", + linked_service: "models.LinkedServiceResource", if_match: Optional[str] = None, **kwargs ) -> "models.LinkedServiceResource": @@ -129,8 +132,8 @@ async def create_or_update( :type factory_name: str :param linked_service_name: The linked service name. :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService + :param linked_service: Linked service resource definition. + :type linked_service: ~data_factory_management_client.models.LinkedServiceResource :param if_match: ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
:type if_match: str @@ -140,12 +143,13 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - linked_service = models.LinkedServiceResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -166,13 +170,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(linked_service, 'LinkedServiceResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -214,9 +217,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -236,7 +242,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -277,9 +283,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -297,6 +306,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py index 3a899779963..3a0dfd46129 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_private_endpoint_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_private_endpoints_operations.py @@ -5,11 +5,11 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +19,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ManagedPrivateEndpointOperations: - """ManagedPrivateEndpointOperations async operations. +class ManagedPrivateEndpointsOperations: + """ManagedPrivateEndpointsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -62,14 +62,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -122,11 +125,8 @@ async def create_or_update( factory_name: str, managed_virtual_network_name: str, managed_private_endpoint_name: str, + managed_private_endpoint: "models.ManagedPrivateEndpointResource", if_match: Optional[str] = None, - connection_state: Optional["models.ConnectionStateProperties"] = None, - fqdns: Optional[List[str]] = None, - group_id: Optional[str] = None, - private_link_resource_id: Optional[str] = None, **kwargs ) -> "models.ManagedPrivateEndpointResource": """Creates or updates a managed private endpoint. @@ -139,30 +139,24 @@ async def create_or_update( :type managed_virtual_network_name: str :param managed_private_endpoint_name: Managed private endpoint name. 
:type managed_private_endpoint_name: str + :param managed_private_endpoint: Managed private endpoint resource definition. + :type managed_private_endpoint: ~data_factory_management_client.models.ManagedPrivateEndpointResource :param if_match: ETag of the managed private endpoint entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. - :type private_link_resource_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ManagedPrivateEndpointResource, or the result of cls(response) :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -184,13 +178,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -235,9 +228,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -258,7 +254,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", 
accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -300,9 +296,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -321,6 +320,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py index 2152988d7ef..908d7b58ffe 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_managed_virtual_network_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_managed_virtual_networks_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +19,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ManagedVirtualNetworkOperations: - """ManagedVirtualNetworkOperations async operations. +class ManagedVirtualNetworksOperations: + """ManagedVirtualNetworksOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. 
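# --- Reviewer note (illustration only, not part of the generated diff) --------
# Sketch of a managed private endpoint call against the regenerated surface:
# the flattened connection_state/fqdns/group_id/private_link_resource_id
# keywords are gathered into a ManagedPrivateEndpointResource, matching the
# constructor the removed in-method shim used above. The client is assumed to
# be built as in the earlier sketch, the managed_private_endpoints attribute
# name is inferred from the class rename, and the ids/names are placeholders.
from azext_datafactory.vendored_sdks.datafactory import models


async def create_blob_private_endpoint(client) -> None:
    endpoint = models.ManagedPrivateEndpointResource(
        group_id="blob",
        private_link_resource_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups"
            "/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/examplestorage"
        ),
    )
    await client.managed_private_endpoints.create_or_update(
        "exampleResourceGroup",
        "exampleFactoryName",
        "exampleManagedVirtualNetworkName",
        "exampleManagedPrivateEndpointName",
        managed_private_endpoint=endpoint,
    )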
@@ -59,14 +59,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -117,7 +120,7 @@ async def create_or_update( resource_group_name: str, factory_name: str, managed_virtual_network_name: str, - properties: "models.ManagedVirtualNetwork", + managed_virtual_network: "models.ManagedVirtualNetworkResource", if_match: Optional[str] = None, **kwargs ) -> "models.ManagedVirtualNetworkResource": @@ -129,8 +132,8 @@ async def create_or_update( :type factory_name: str :param managed_virtual_network_name: Managed virtual network name. :type managed_virtual_network_name: str - :param properties: Managed Virtual Network properties. - :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork + :param managed_virtual_network: Managed Virtual Network resource definition. + :type managed_virtual_network: ~data_factory_management_client.models.ManagedVirtualNetworkResource :param if_match: ETag of the managed Virtual Network entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -140,12 +143,13 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -166,13 +170,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -214,9 +217,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -236,7 +242,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py similarity index 90% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py index 83206d77039..8d96ffc136c 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_operation_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +19,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class OperationOperations: - """OperationOperations async operations. +class Operations: + """Operations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. 
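# --- Reviewer note (illustration only, not part of the generated diff) --------
# Sketch of the equivalent managed virtual network call: the old `properties`
# keyword is replaced by a ManagedVirtualNetworkResource wrapper, mirroring the
# removed `ManagedVirtualNetworkResource(properties=properties)` shim above.
# Assumes ManagedVirtualNetwork needs no required constructor arguments and
# that the client exposes the group as .managed_virtual_networks; names are
# placeholders.
from azext_datafactory.vendored_sdks.datafactory import models


async def create_managed_vnet(client) -> None:
    resource = models.ManagedVirtualNetworkResource(
        properties=models.ManagedVirtualNetwork(),
    )
    await client.managed_virtual_networks.create_or_update(
        "exampleResourceGroup",
        "exampleFactoryName",
        "exampleManagedVirtualNetworkName",
        managed_virtual_network=resource,
    )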
@@ -53,14 +53,17 @@ def list( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py similarity index 84% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py index 5cdfd09fe01..8d4b4efdb99 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_run_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipeline_runs_operations.py @@ -5,11 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar +from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +18,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class PipelineRunOperations: - """PipelineRunOperations async operations. +class PipelineRunsOperations: + """PipelineRunsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -45,11 +44,7 @@ async def query_by_factory( self, resource_group_name: str, factory_name: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, + filter_parameters: "models.RunFilterParameters", **kwargs ) -> "models.PipelineRunsQueryResponse": """Query pipeline runs in the factory based on input filter conditions. @@ -58,31 +53,21 @@ async def query_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. 
- :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] + :param filter_parameters: Parameters to filter the pipeline run. + :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: PipelineRunsQueryResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -100,13 +85,12 @@ async def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -143,9 +127,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -163,7 +150,7 @@ async def get( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -206,9 +193,12 @@ async def cancel( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: 
ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.cancel.metadata['url'] # type: ignore @@ -228,6 +218,7 @@ async def cancel( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py similarity index 93% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py index 34c7453f951..1c73e154e35 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_pipeline_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_pipelines_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +19,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class PipelineOperations: - """PipelineOperations async operations. +class PipelinesOperations: + """PipelinesOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. 
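The _pipeline_runs_operations.py hunks above replace the flattened query keywords (last_updated_after, last_updated_before, continuation_token_parameter, filters, order_by) with a single RunFilterParameters body parameter. A minimal caller-side sketch of the new shape, assuming an already-constructed async DataFactoryManagementClient bound to `client` and that the regenerated client exposes the renamed group as `client.pipeline_runs`:

import datetime

from azext_datafactory.vendored_sdks.datafactory import models


async def query_recent_pipeline_runs(client, resource_group_name: str, factory_name: str):
    # Callers now build the RunFilterParameters model themselves instead of
    # passing last_updated_after/last_updated_before/filters/order_by as
    # separate keyword arguments.
    filter_parameters = models.RunFilterParameters(
        last_updated_after=datetime.datetime(2018, 6, 16, tzinfo=datetime.timezone.utc),
        last_updated_before=datetime.datetime(2018, 6, 17, tzinfo=datetime.timezone.utc),
    )
    return await client.pipeline_runs.query_by_factory(
        resource_group_name, factory_name, filter_parameters
    )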
@@ -59,14 +59,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -140,10 +143,13 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -164,13 +170,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(pipeline, 'PipelineResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -211,9 +216,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -233,7 +241,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -274,9 +282,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = 
self.delete.metadata['url'] # type: ignore @@ -294,6 +305,7 @@ async def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -349,10 +361,13 @@ async def create_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_run.metadata['url'] # type: ignore @@ -379,7 +394,7 @@ async def create_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if parameters is not None: @@ -388,7 +403,6 @@ async def create_run( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py similarity index 84% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py index 3401f9c95c1..7fbcbc61f39 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_run_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_trigger_runs_operations.py @@ -5,11 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import datetime -from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar +from typing import Any, Callable, Dict, Generic, Optional, TypeVar import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat @@ -19,8 +18,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class TriggerRunOperations: - """TriggerRunOperations async operations. 
+class TriggerRunsOperations: + """TriggerRunsOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -65,9 +64,12 @@ async def rerun( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.rerun.metadata['url'] # type: ignore @@ -86,6 +88,7 @@ async def rerun( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -124,9 +127,12 @@ async def cancel( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.cancel.metadata['url'] # type: ignore @@ -145,6 +151,7 @@ async def cancel( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -163,11 +170,7 @@ async def query_by_factory( self, resource_group_name: str, factory_name: str, - last_updated_after: datetime.datetime, - last_updated_before: datetime.datetime, - continuation_token_parameter: Optional[str] = None, - filters: Optional[List["models.RunQueryFilter"]] = None, - order_by: Optional[List["models.RunQueryOrderBy"]] = None, + filter_parameters: "models.RunFilterParameters", **kwargs ) -> "models.TriggerRunsQueryResponse": """Query trigger runs. @@ -176,31 +179,21 @@ async def query_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] + :param filter_parameters: Parameters to filter the pipeline run. 
+ :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerRunsQueryResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -218,13 +211,12 @@ async def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py similarity index 85% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py index f4669b45bc2..a9f7bd54c4d 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations/_triggers_operations.py @@ -9,7 +9,7 @@ import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod @@ -21,8 +21,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class TriggerOperations: - """TriggerOperations async operations. +class TriggersOperations: + """TriggersOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. 
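The renamed trigger-runs group gets the same reshaping: _trigger_runs_operations.py's query_by_factory now also accepts a single RunFilterParameters body instead of flattened keywords. A hypothetical sketch of a caller filtering trigger runs by name, assuming the regenerated client exposes the group as `client.trigger_runs` and that "TriggerName"/"Equals" are accepted RunQueryFilter operand/operator values:

from azext_datafactory.vendored_sdks.datafactory import models


async def query_trigger_runs_by_name(client, resource_group_name: str, factory_name: str,
                                     window_start, window_end, trigger_name: str):
    # Filters and ordering now ride inside the RunFilterParameters body rather
    # than being passed as separate filters/order_by keyword arguments.
    filter_parameters = models.RunFilterParameters(
        last_updated_after=window_start,
        last_updated_before=window_end,
        filters=[models.RunQueryFilter(operand="TriggerName", operator="Equals",
                                       values=[trigger_name])],
    )
    return await client.trigger_runs.query_by_factory(
        resource_group_name, factory_name, filter_parameters
    )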
@@ -61,14 +61,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -118,8 +121,7 @@ async def query_by_factory( self, resource_group_name: str, factory_name: str, - continuation_token_parameter: Optional[str] = None, - parent_trigger_name: Optional[str] = None, + filter_parameters: "models.TriggerFilterParameters", **kwargs ) -> "models.TriggerQueryResponse": """Query triggers. @@ -128,24 +130,21 @@ async def query_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun - triggers. - :type parent_trigger_name: str + :param filter_parameters: Parameters to filter the triggers. + :type filter_parameters: ~data_factory_management_client.models.TriggerFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerQueryResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.TriggerQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -163,13 +162,12 @@ async def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -190,7 +188,7 @@ async def create_or_update( resource_group_name: str, factory_name: str, trigger_name: str, - properties: "models.Trigger", + trigger: "models.TriggerResource", if_match: Optional[str] = None, **kwargs ) -> "models.TriggerResource": @@ 
-202,8 +200,8 @@ async def create_or_update( :type factory_name: str :param trigger_name: The trigger name. :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~data_factory_management_client.models.Trigger + :param trigger: Trigger resource definition. + :type trigger: ~data_factory_management_client.models.TriggerResource :param if_match: ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -213,12 +211,13 @@ async def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - trigger = models.TriggerResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -239,13 +238,12 @@ async def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(trigger, 'TriggerResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -286,9 +284,12 @@ async def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -308,7 +309,7 @@ async def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -349,9 +350,12 @@ async def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -369,6 +373,7 @@ async def delete( # Construct headers header_parameters = {} # type: 
Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -383,7 +388,7 @@ async def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - async def _subscribe_to_event_initial( + async def _subscribe_to_events_initial( self, resource_group_name: str, factory_name: str, @@ -391,12 +396,15 @@ async def _subscribe_to_event_initial( **kwargs ) -> Optional["models.TriggerSubscriptionOperationStatus"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL - url = self._subscribe_to_event_initial.metadata['url'] # type: ignore + url = self._subscribe_to_events_initial.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -411,7 +419,7 @@ async def _subscribe_to_event_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -429,9 +437,9 @@ async def _subscribe_to_event_initial( return cls(pipeline_response, deserialized, {}) return deserialized - _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + _subscribe_to_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - async def begin_subscribe_to_event( + async def begin_subscribe_to_events( self, resource_group_name: str, factory_name: str, @@ -464,7 +472,7 @@ async def begin_subscribe_to_event( ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: - raw_result = await self._subscribe_to_event_initial( + raw_result = await self._subscribe_to_events_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -482,7 +490,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, 
pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -494,7 +509,7 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + begin_subscribe_to_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore async def get_event_subscription_status( self, @@ -517,9 +532,12 @@ async def get_event_subscription_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_event_subscription_status.metadata['url'] # type: ignore @@ -537,7 +555,7 @@ async def get_event_subscription_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -555,7 +573,7 @@ async def get_event_subscription_status( return deserialized get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore - async def _unsubscribe_from_event_initial( + async def _unsubscribe_from_events_initial( self, resource_group_name: str, factory_name: str, @@ -563,12 +581,15 @@ async def _unsubscribe_from_event_initial( **kwargs ) -> Optional["models.TriggerSubscriptionOperationStatus"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL - url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore + url = self._unsubscribe_from_events_initial.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': 
self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -583,7 +604,7 @@ async def _unsubscribe_from_event_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -601,9 +622,9 @@ async def _unsubscribe_from_event_initial( return cls(pipeline_response, deserialized, {}) return deserialized - _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + _unsubscribe_from_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - async def begin_unsubscribe_from_event( + async def begin_unsubscribe_from_events( self, resource_group_name: str, factory_name: str, @@ -636,7 +657,7 @@ async def begin_unsubscribe_from_event( ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: - raw_result = await self._unsubscribe_from_event_initial( + raw_result = await self._unsubscribe_from_events_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -654,7 +675,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -666,7 +694,7 @@ def get_long_running_output(pipeline_response): ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + begin_unsubscribe_from_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore async def _start_initial( self, @@ -676,9 +704,12 @@ async def _start_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + 
error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._start_initial.metadata['url'] # type: ignore @@ -696,6 +727,7 @@ async def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -758,7 +790,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: @@ -780,9 +819,12 @@ async def _stop_initial( **kwargs ) -> None: cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore @@ -800,6 +842,7 @@ async def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) @@ -862,7 +905,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: diff --git 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py deleted file mode 100644 index 554e3ba9232..00000000000 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from ._operation_operations_async import OperationOperations -from ._factory_operations_async import FactoryOperations -from ._exposure_control_operations_async import ExposureControlOperations -from ._integration_runtime_operations_async import IntegrationRuntimeOperations -from ._integration_runtime_object_metadata_operations_async import IntegrationRuntimeObjectMetadataOperations -from ._integration_runtime_node_operations_async import IntegrationRuntimeNodeOperations -from ._linked_service_operations_async import LinkedServiceOperations -from ._dataset_operations_async import DatasetOperations -from ._pipeline_operations_async import PipelineOperations -from ._pipeline_run_operations_async import PipelineRunOperations -from ._activity_run_operations_async import ActivityRunOperations -from ._trigger_operations_async import TriggerOperations -from ._trigger_run_operations_async import TriggerRunOperations -from ._data_flow_operations_async import DataFlowOperations -from ._data_flow_debug_session_operations_async import DataFlowDebugSessionOperations -from ._managed_virtual_network_operations_async import ManagedVirtualNetworkOperations -from ._managed_private_endpoint_operations_async import ManagedPrivateEndpointOperations - -__all__ = [ - 'OperationOperations', - 'FactoryOperations', - 'ExposureControlOperations', - 'IntegrationRuntimeOperations', - 'IntegrationRuntimeObjectMetadataOperations', - 'IntegrationRuntimeNodeOperations', - 'LinkedServiceOperations', - 'DatasetOperations', - 'PipelineOperations', - 'PipelineRunOperations', - 'ActivityRunOperations', - 'TriggerOperations', - 'TriggerRunOperations', - 'DataFlowOperations', - 'DataFlowDebugSessionOperations', - 'ManagedVirtualNetworkOperations', - 'ManagedPrivateEndpointOperations', -] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py index 160afca0562..5c90796bfba 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py @@ -58,6 +58,12 @@ from ._models_py3 import AzureDataLakeStoreSink from ._models_py3 import AzureDataLakeStoreSource from ._models_py3 import AzureDataLakeStoreWriteSettings + from ._models_py3 import AzureDatabricksDeltaLakeDataset + from ._models_py3 import AzureDatabricksDeltaLakeExportCommand + from ._models_py3 import AzureDatabricksDeltaLakeImportCommand + from ._models_py3 import AzureDatabricksDeltaLakeLinkedService + from ._models_py3 import AzureDatabricksDeltaLakeSink + from ._models_py3 import AzureDatabricksDeltaLakeSource 
from ._models_py3 import AzureDatabricksLinkedService from ._models_py3 import AzureFileStorageLinkedService from ._models_py3 import AzureFileStorageLocation @@ -67,35 +73,35 @@ from ._models_py3 import AzureFunctionLinkedService from ._models_py3 import AzureKeyVaultLinkedService from ._models_py3 import AzureKeyVaultSecretReference - from ._models_py3 import AzureMariaDBLinkedService - from ._models_py3 import AzureMariaDBSource - from ._models_py3 import AzureMariaDBTableDataset + from ._models_py3 import AzureMariaDbLinkedService + from ._models_py3 import AzureMariaDbSource + from ._models_py3 import AzureMariaDbTableDataset from ._models_py3 import AzureMlBatchExecutionActivity from ._models_py3 import AzureMlExecutePipelineActivity from ._models_py3 import AzureMlLinkedService from ._models_py3 import AzureMlServiceLinkedService from ._models_py3 import AzureMlUpdateResourceActivity from ._models_py3 import AzureMlWebServiceFile - from ._models_py3 import AzureMySQLLinkedService - from ._models_py3 import AzureMySQLSink - from ._models_py3 import AzureMySQLSource - from ._models_py3 import AzureMySQLTableDataset - from ._models_py3 import AzurePostgreSQLLinkedService - from ._models_py3 import AzurePostgreSQLSink - from ._models_py3 import AzurePostgreSQLSource - from ._models_py3 import AzurePostgreSQLTableDataset + from ._models_py3 import AzureMySqlLinkedService + from ._models_py3 import AzureMySqlSink + from ._models_py3 import AzureMySqlSource + from ._models_py3 import AzureMySqlTableDataset + from ._models_py3 import AzurePostgreSqlLinkedService + from ._models_py3 import AzurePostgreSqlSink + from ._models_py3 import AzurePostgreSqlSource + from ._models_py3 import AzurePostgreSqlTableDataset from ._models_py3 import AzureQueueSink - from ._models_py3 import AzureSQLDWLinkedService - from ._models_py3 import AzureSQLDWTableDataset - from ._models_py3 import AzureSQLDatabaseLinkedService - from ._models_py3 import AzureSQLMiLinkedService - from ._models_py3 import AzureSQLMiTableDataset - from ._models_py3 import AzureSQLSink - from ._models_py3 import AzureSQLSource - from ._models_py3 import AzureSQLTableDataset from ._models_py3 import AzureSearchIndexDataset from ._models_py3 import AzureSearchIndexSink from ._models_py3 import AzureSearchLinkedService + from ._models_py3 import AzureSqlDatabaseLinkedService + from ._models_py3 import AzureSqlDwLinkedService + from ._models_py3 import AzureSqlDwTableDataset + from ._models_py3 import AzureSqlMiLinkedService + from ._models_py3 import AzureSqlMiTableDataset + from ._models_py3 import AzureSqlSink + from ._models_py3 import AzureSqlSource + from ._models_py3 import AzureSqlTableDataset from ._models_py3 import AzureStorageLinkedService from ._models_py3 import AzureTableDataset from ._models_py3 import AzureTableSink @@ -115,6 +121,7 @@ from ._models_py3 import ChainingTrigger from ._models_py3 import CloudError from ._models_py3 import CmdkeySetup + from ._models_py3 import CmkIdentityDefinition from ._models_py3 import CommonDataServiceForAppsEntityDataset from ._models_py3 import CommonDataServiceForAppsLinkedService from ._models_py3 import CommonDataServiceForAppsSink @@ -127,17 +134,18 @@ from ._models_py3 import ConnectionStateProperties from ._models_py3 import ControlActivity from ._models_py3 import CopyActivity + from ._models_py3 import CopyActivityLogSettings from ._models_py3 import CopySink from ._models_py3 import CopySource from ._models_py3 import CopyTranslator - from ._models_py3 import 
CosmosDBLinkedService - from ._models_py3 import CosmosDBMongoDBApiCollectionDataset - from ._models_py3 import CosmosDBMongoDBApiLinkedService - from ._models_py3 import CosmosDBMongoDBApiSink - from ._models_py3 import CosmosDBMongoDBApiSource - from ._models_py3 import CosmosDBSQLApiCollectionDataset - from ._models_py3 import CosmosDBSQLApiSink - from ._models_py3 import CosmosDBSQLApiSource + from ._models_py3 import CosmosDbLinkedService + from ._models_py3 import CosmosDbMongoDbApiCollectionDataset + from ._models_py3 import CosmosDbMongoDbApiLinkedService + from ._models_py3 import CosmosDbMongoDbApiSink + from ._models_py3 import CosmosDbMongoDbApiSource + from ._models_py3 import CosmosDbSqlApiCollectionDataset + from ._models_py3 import CosmosDbSqlApiSink + from ._models_py3 import CosmosDbSqlApiSource from ._models_py3 import CouchbaseLinkedService from ._models_py3 import CouchbaseSource from ._models_py3 import CouchbaseTableDataset @@ -149,14 +157,14 @@ from ._models_py3 import CustomActivityReferenceObject from ._models_py3 import CustomDataSourceLinkedService from ._models_py3 import CustomDataset + from ._models_py3 import CustomEventsTrigger from ._models_py3 import CustomSetupBase - from ._models_py3 import DWCopyCommandDefaultValue - from ._models_py3 import DWCopyCommandSettings from ._models_py3 import DataFlow from ._models_py3 import DataFlowDebugCommandPayload from ._models_py3 import DataFlowDebugCommandRequest from ._models_py3 import DataFlowDebugCommandResponse from ._models_py3 import DataFlowDebugPackage + from ._models_py3 import DataFlowDebugPackageDebugSettings from ._models_py3 import DataFlowDebugResource from ._models_py3 import DataFlowDebugSessionInfo from ._models_py3 import DataFlowFolder @@ -185,6 +193,8 @@ from ._models_py3 import DatasetResource from ._models_py3 import DatasetSchemaDataElement from ._models_py3 import DatasetStorageFormat + from ._models_py3 import DatasetTarCompression + from ._models_py3 import DatasetTarGZipCompression from ._models_py3 import DatasetZipDeflateCompression from ._models_py3 import Db2LinkedService from ._models_py3 import Db2Source @@ -198,12 +208,14 @@ from ._models_py3 import DelimitedTextWriteSettings from ._models_py3 import DependencyReference from ._models_py3 import DistcpSettings - from ._models_py3 import DocumentDBCollectionDataset - from ._models_py3 import DocumentDBCollectionSink - from ._models_py3 import DocumentDBCollectionSource + from ._models_py3 import DocumentDbCollectionDataset + from ._models_py3 import DocumentDbCollectionSink + from ._models_py3 import DocumentDbCollectionSource from ._models_py3 import DrillLinkedService from ._models_py3 import DrillSource from ._models_py3 import DrillTableDataset + from ._models_py3 import DwCopyCommandDefaultValue + from ._models_py3 import DwCopyCommandSettings from ._models_py3 import DynamicsAxLinkedService from ._models_py3 import DynamicsAxResourceDataset from ._models_py3 import DynamicsAxSource @@ -218,6 +230,7 @@ from ._models_py3 import EloquaLinkedService from ._models_py3 import EloquaObjectDataset from ._models_py3 import EloquaSource + from ._models_py3 import EncryptionConfiguration from ._models_py3 import EntityReference from ._models_py3 import EnvironmentVariableSetup from ._models_py3 import ExcelDataset @@ -347,6 +360,8 @@ from ._models_py3 import LinkedServiceListResponse from ._models_py3 import LinkedServiceReference from ._models_py3 import LinkedServiceResource + from ._models_py3 import LogLocationSettings + from 
._models_py3 import LogSettings from ._models_py3 import LogStorageSettings from ._models_py3 import LookupActivity from ._models_py3 import MagentoLinkedService @@ -362,11 +377,12 @@ from ._models_py3 import ManagedPrivateEndpointResource from ._models_py3 import ManagedVirtualNetwork from ._models_py3 import ManagedVirtualNetworkListResponse + from ._models_py3 import ManagedVirtualNetworkReference from ._models_py3 import ManagedVirtualNetworkResource from ._models_py3 import MappingDataFlow - from ._models_py3 import MariaDBLinkedService - from ._models_py3 import MariaDBSource - from ._models_py3 import MariaDBTableDataset + from ._models_py3 import MariaDbLinkedService + from ._models_py3 import MariaDbSource + from ._models_py3 import MariaDbTableDataset from ._models_py3 import MarketoLinkedService from ._models_py3 import MarketoObjectDataset from ._models_py3 import MarketoSource @@ -374,17 +390,20 @@ from ._models_py3 import MicrosoftAccessSink from ._models_py3 import MicrosoftAccessSource from ._models_py3 import MicrosoftAccessTableDataset - from ._models_py3 import MongoDBCollectionDataset - from ._models_py3 import MongoDBCursorMethodsProperties - from ._models_py3 import MongoDBLinkedService - from ._models_py3 import MongoDBSource - from ._models_py3 import MongoDBV2CollectionDataset - from ._models_py3 import MongoDBV2LinkedService - from ._models_py3 import MongoDBV2Source + from ._models_py3 import MongoDbAtlasCollectionDataset + from ._models_py3 import MongoDbAtlasLinkedService + from ._models_py3 import MongoDbAtlasSource + from ._models_py3 import MongoDbCollectionDataset + from ._models_py3 import MongoDbCursorMethodsProperties + from ._models_py3 import MongoDbLinkedService + from ._models_py3 import MongoDbSource + from ._models_py3 import MongoDbV2CollectionDataset + from ._models_py3 import MongoDbV2LinkedService + from ._models_py3 import MongoDbV2Source from ._models_py3 import MultiplePipelineTrigger - from ._models_py3 import MySQLLinkedService - from ._models_py3 import MySQLSource - from ._models_py3 import MySQLTableDataset + from ._models_py3 import MySqlLinkedService + from ._models_py3 import MySqlSource + from ._models_py3 import MySqlTableDataset from ._models_py3 import NetezzaLinkedService from ._models_py3 import NetezzaPartitionSettings from ._models_py3 import NetezzaSource @@ -419,28 +438,33 @@ from ._models_py3 import OrcFormat from ._models_py3 import OrcSink from ._models_py3 import OrcSource + from ._models_py3 import OrcWriteSettings from ._models_py3 import PackageStore from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset from ._models_py3 import ParquetFormat from ._models_py3 import ParquetSink from ._models_py3 import ParquetSource + from ._models_py3 import ParquetWriteSettings from ._models_py3 import PaypalLinkedService from ._models_py3 import PaypalObjectDataset from ._models_py3 import PaypalSource from ._models_py3 import PhoenixLinkedService from ._models_py3 import PhoenixObjectDataset from ._models_py3 import PhoenixSource + from ._models_py3 import PipelineElapsedTimeMetricPolicy + from ._models_py3 import PipelineFolder from ._models_py3 import PipelineListResponse + from ._models_py3 import PipelinePolicy from ._models_py3 import PipelineReference from ._models_py3 import PipelineResource from ._models_py3 import PipelineRun from ._models_py3 import PipelineRunInvokedBy from ._models_py3 import PipelineRunsQueryResponse from ._models_py3 import PolybaseSettings - from ._models_py3 
import PostgreSQLLinkedService - from ._models_py3 import PostgreSQLSource - from ._models_py3 import PostgreSQLTableDataset + from ._models_py3 import PostgreSqlLinkedService + from ._models_py3 import PostgreSqlSource + from ._models_py3 import PostgreSqlTableDataset from ._models_py3 import PrestoLinkedService from ._models_py3 import PrestoObjectDataset from ._models_py3 import PrestoSource @@ -467,18 +491,6 @@ from ._models_py3 import RunFilterParameters from ._models_py3 import RunQueryFilter from ._models_py3 import RunQueryOrderBy - from ._models_py3 import SQLDWSink - from ._models_py3 import SQLDWSource - from ._models_py3 import SQLMiSink - from ._models_py3 import SQLMiSource - from ._models_py3 import SQLPartitionSettings - from ._models_py3 import SQLServerLinkedService - from ._models_py3 import SQLServerSink - from ._models_py3 import SQLServerSource - from ._models_py3 import SQLServerStoredProcedureActivity - from ._models_py3 import SQLServerTableDataset - from ._models_py3 import SQLSink - from ._models_py3 import SQLSource from ._models_py3 import SalesforceLinkedService from ._models_py3 import SalesforceMarketingCloudLinkedService from ._models_py3 import SalesforceMarketingCloudObjectDataset @@ -544,6 +556,18 @@ from ._models_py3 import SparkLinkedService from ._models_py3 import SparkObjectDataset from ._models_py3 import SparkSource + from ._models_py3 import SqlDwSink + from ._models_py3 import SqlDwSource + from ._models_py3 import SqlMiSink + from ._models_py3 import SqlMiSource + from ._models_py3 import SqlPartitionSettings + from ._models_py3 import SqlServerLinkedService + from ._models_py3 import SqlServerSink + from ._models_py3 import SqlServerSource + from ._models_py3 import SqlServerStoredProcedureActivity + from ._models_py3 import SqlServerTableDataset + from ._models_py3 import SqlSink + from ._models_py3 import SqlSource from ._models_py3 import SquareLinkedService from ._models_py3 import SquareObjectDataset from ._models_py3 import SquareSource @@ -577,6 +601,8 @@ from ._models_py3 import SybaseTableDataset from ._models_py3 import TabularSource from ._models_py3 import TabularTranslator + from ._models_py3 import TarGZipReadSettings + from ._models_py3 import TarReadSettings from ._models_py3 import TeradataLinkedService from ._models_py3 import TeradataPartitionSettings from ._models_py3 import TeradataSource @@ -680,6 +706,12 @@ from ._models import AzureDataLakeStoreSink # type: ignore from ._models import AzureDataLakeStoreSource # type: ignore from ._models import AzureDataLakeStoreWriteSettings # type: ignore + from ._models import AzureDatabricksDeltaLakeDataset # type: ignore + from ._models import AzureDatabricksDeltaLakeExportCommand # type: ignore + from ._models import AzureDatabricksDeltaLakeImportCommand # type: ignore + from ._models import AzureDatabricksDeltaLakeLinkedService # type: ignore + from ._models import AzureDatabricksDeltaLakeSink # type: ignore + from ._models import AzureDatabricksDeltaLakeSource # type: ignore from ._models import AzureDatabricksLinkedService # type: ignore from ._models import AzureFileStorageLinkedService # type: ignore from ._models import AzureFileStorageLocation # type: ignore @@ -689,35 +721,35 @@ from ._models import AzureFunctionLinkedService # type: ignore from ._models import AzureKeyVaultLinkedService # type: ignore from ._models import AzureKeyVaultSecretReference # type: ignore - from ._models import AzureMariaDBLinkedService # type: ignore - from ._models import AzureMariaDBSource # 
type: ignore - from ._models import AzureMariaDBTableDataset # type: ignore + from ._models import AzureMariaDbLinkedService # type: ignore + from ._models import AzureMariaDbSource # type: ignore + from ._models import AzureMariaDbTableDataset # type: ignore from ._models import AzureMlBatchExecutionActivity # type: ignore from ._models import AzureMlExecutePipelineActivity # type: ignore from ._models import AzureMlLinkedService # type: ignore from ._models import AzureMlServiceLinkedService # type: ignore from ._models import AzureMlUpdateResourceActivity # type: ignore from ._models import AzureMlWebServiceFile # type: ignore - from ._models import AzureMySQLLinkedService # type: ignore - from ._models import AzureMySQLSink # type: ignore - from ._models import AzureMySQLSource # type: ignore - from ._models import AzureMySQLTableDataset # type: ignore - from ._models import AzurePostgreSQLLinkedService # type: ignore - from ._models import AzurePostgreSQLSink # type: ignore - from ._models import AzurePostgreSQLSource # type: ignore - from ._models import AzurePostgreSQLTableDataset # type: ignore + from ._models import AzureMySqlLinkedService # type: ignore + from ._models import AzureMySqlSink # type: ignore + from ._models import AzureMySqlSource # type: ignore + from ._models import AzureMySqlTableDataset # type: ignore + from ._models import AzurePostgreSqlLinkedService # type: ignore + from ._models import AzurePostgreSqlSink # type: ignore + from ._models import AzurePostgreSqlSource # type: ignore + from ._models import AzurePostgreSqlTableDataset # type: ignore from ._models import AzureQueueSink # type: ignore - from ._models import AzureSQLDWLinkedService # type: ignore - from ._models import AzureSQLDWTableDataset # type: ignore - from ._models import AzureSQLDatabaseLinkedService # type: ignore - from ._models import AzureSQLMiLinkedService # type: ignore - from ._models import AzureSQLMiTableDataset # type: ignore - from ._models import AzureSQLSink # type: ignore - from ._models import AzureSQLSource # type: ignore - from ._models import AzureSQLTableDataset # type: ignore from ._models import AzureSearchIndexDataset # type: ignore from ._models import AzureSearchIndexSink # type: ignore from ._models import AzureSearchLinkedService # type: ignore + from ._models import AzureSqlDatabaseLinkedService # type: ignore + from ._models import AzureSqlDwLinkedService # type: ignore + from ._models import AzureSqlDwTableDataset # type: ignore + from ._models import AzureSqlMiLinkedService # type: ignore + from ._models import AzureSqlMiTableDataset # type: ignore + from ._models import AzureSqlSink # type: ignore + from ._models import AzureSqlSource # type: ignore + from ._models import AzureSqlTableDataset # type: ignore from ._models import AzureStorageLinkedService # type: ignore from ._models import AzureTableDataset # type: ignore from ._models import AzureTableSink # type: ignore @@ -737,6 +769,7 @@ from ._models import ChainingTrigger # type: ignore from ._models import CloudError # type: ignore from ._models import CmdkeySetup # type: ignore + from ._models import CmkIdentityDefinition # type: ignore from ._models import CommonDataServiceForAppsEntityDataset # type: ignore from ._models import CommonDataServiceForAppsLinkedService # type: ignore from ._models import CommonDataServiceForAppsSink # type: ignore @@ -749,17 +782,18 @@ from ._models import ConnectionStateProperties # type: ignore from ._models import ControlActivity # type: ignore from ._models import 
CopyActivity # type: ignore + from ._models import CopyActivityLogSettings # type: ignore from ._models import CopySink # type: ignore from ._models import CopySource # type: ignore from ._models import CopyTranslator # type: ignore - from ._models import CosmosDBLinkedService # type: ignore - from ._models import CosmosDBMongoDBApiCollectionDataset # type: ignore - from ._models import CosmosDBMongoDBApiLinkedService # type: ignore - from ._models import CosmosDBMongoDBApiSink # type: ignore - from ._models import CosmosDBMongoDBApiSource # type: ignore - from ._models import CosmosDBSQLApiCollectionDataset # type: ignore - from ._models import CosmosDBSQLApiSink # type: ignore - from ._models import CosmosDBSQLApiSource # type: ignore + from ._models import CosmosDbLinkedService # type: ignore + from ._models import CosmosDbMongoDbApiCollectionDataset # type: ignore + from ._models import CosmosDbMongoDbApiLinkedService # type: ignore + from ._models import CosmosDbMongoDbApiSink # type: ignore + from ._models import CosmosDbMongoDbApiSource # type: ignore + from ._models import CosmosDbSqlApiCollectionDataset # type: ignore + from ._models import CosmosDbSqlApiSink # type: ignore + from ._models import CosmosDbSqlApiSource # type: ignore from ._models import CouchbaseLinkedService # type: ignore from ._models import CouchbaseSource # type: ignore from ._models import CouchbaseTableDataset # type: ignore @@ -771,14 +805,14 @@ from ._models import CustomActivityReferenceObject # type: ignore from ._models import CustomDataSourceLinkedService # type: ignore from ._models import CustomDataset # type: ignore + from ._models import CustomEventsTrigger # type: ignore from ._models import CustomSetupBase # type: ignore - from ._models import DWCopyCommandDefaultValue # type: ignore - from ._models import DWCopyCommandSettings # type: ignore from ._models import DataFlow # type: ignore from ._models import DataFlowDebugCommandPayload # type: ignore from ._models import DataFlowDebugCommandRequest # type: ignore from ._models import DataFlowDebugCommandResponse # type: ignore from ._models import DataFlowDebugPackage # type: ignore + from ._models import DataFlowDebugPackageDebugSettings # type: ignore from ._models import DataFlowDebugResource # type: ignore from ._models import DataFlowDebugSessionInfo # type: ignore from ._models import DataFlowFolder # type: ignore @@ -807,6 +841,8 @@ from ._models import DatasetResource # type: ignore from ._models import DatasetSchemaDataElement # type: ignore from ._models import DatasetStorageFormat # type: ignore + from ._models import DatasetTarCompression # type: ignore + from ._models import DatasetTarGZipCompression # type: ignore from ._models import DatasetZipDeflateCompression # type: ignore from ._models import Db2LinkedService # type: ignore from ._models import Db2Source # type: ignore @@ -820,12 +856,14 @@ from ._models import DelimitedTextWriteSettings # type: ignore from ._models import DependencyReference # type: ignore from ._models import DistcpSettings # type: ignore - from ._models import DocumentDBCollectionDataset # type: ignore - from ._models import DocumentDBCollectionSink # type: ignore - from ._models import DocumentDBCollectionSource # type: ignore + from ._models import DocumentDbCollectionDataset # type: ignore + from ._models import DocumentDbCollectionSink # type: ignore + from ._models import DocumentDbCollectionSource # type: ignore from ._models import DrillLinkedService # type: ignore from ._models import DrillSource # 
type: ignore from ._models import DrillTableDataset # type: ignore + from ._models import DwCopyCommandDefaultValue # type: ignore + from ._models import DwCopyCommandSettings # type: ignore from ._models import DynamicsAxLinkedService # type: ignore from ._models import DynamicsAxResourceDataset # type: ignore from ._models import DynamicsAxSource # type: ignore @@ -840,6 +878,7 @@ from ._models import EloquaLinkedService # type: ignore from ._models import EloquaObjectDataset # type: ignore from ._models import EloquaSource # type: ignore + from ._models import EncryptionConfiguration # type: ignore from ._models import EntityReference # type: ignore from ._models import EnvironmentVariableSetup # type: ignore from ._models import ExcelDataset # type: ignore @@ -969,6 +1008,8 @@ from ._models import LinkedServiceListResponse # type: ignore from ._models import LinkedServiceReference # type: ignore from ._models import LinkedServiceResource # type: ignore + from ._models import LogLocationSettings # type: ignore + from ._models import LogSettings # type: ignore from ._models import LogStorageSettings # type: ignore from ._models import LookupActivity # type: ignore from ._models import MagentoLinkedService # type: ignore @@ -984,11 +1025,12 @@ from ._models import ManagedPrivateEndpointResource # type: ignore from ._models import ManagedVirtualNetwork # type: ignore from ._models import ManagedVirtualNetworkListResponse # type: ignore + from ._models import ManagedVirtualNetworkReference # type: ignore from ._models import ManagedVirtualNetworkResource # type: ignore from ._models import MappingDataFlow # type: ignore - from ._models import MariaDBLinkedService # type: ignore - from ._models import MariaDBSource # type: ignore - from ._models import MariaDBTableDataset # type: ignore + from ._models import MariaDbLinkedService # type: ignore + from ._models import MariaDbSource # type: ignore + from ._models import MariaDbTableDataset # type: ignore from ._models import MarketoLinkedService # type: ignore from ._models import MarketoObjectDataset # type: ignore from ._models import MarketoSource # type: ignore @@ -996,17 +1038,20 @@ from ._models import MicrosoftAccessSink # type: ignore from ._models import MicrosoftAccessSource # type: ignore from ._models import MicrosoftAccessTableDataset # type: ignore - from ._models import MongoDBCollectionDataset # type: ignore - from ._models import MongoDBCursorMethodsProperties # type: ignore - from ._models import MongoDBLinkedService # type: ignore - from ._models import MongoDBSource # type: ignore - from ._models import MongoDBV2CollectionDataset # type: ignore - from ._models import MongoDBV2LinkedService # type: ignore - from ._models import MongoDBV2Source # type: ignore + from ._models import MongoDbAtlasCollectionDataset # type: ignore + from ._models import MongoDbAtlasLinkedService # type: ignore + from ._models import MongoDbAtlasSource # type: ignore + from ._models import MongoDbCollectionDataset # type: ignore + from ._models import MongoDbCursorMethodsProperties # type: ignore + from ._models import MongoDbLinkedService # type: ignore + from ._models import MongoDbSource # type: ignore + from ._models import MongoDbV2CollectionDataset # type: ignore + from ._models import MongoDbV2LinkedService # type: ignore + from ._models import MongoDbV2Source # type: ignore from ._models import MultiplePipelineTrigger # type: ignore - from ._models import MySQLLinkedService # type: ignore - from ._models import MySQLSource # type: ignore - 
from ._models import MySQLTableDataset # type: ignore + from ._models import MySqlLinkedService # type: ignore + from ._models import MySqlSource # type: ignore + from ._models import MySqlTableDataset # type: ignore from ._models import NetezzaLinkedService # type: ignore from ._models import NetezzaPartitionSettings # type: ignore from ._models import NetezzaSource # type: ignore @@ -1041,28 +1086,33 @@ from ._models import OrcFormat # type: ignore from ._models import OrcSink # type: ignore from ._models import OrcSource # type: ignore + from ._models import OrcWriteSettings # type: ignore from ._models import PackageStore # type: ignore from ._models import ParameterSpecification # type: ignore from ._models import ParquetDataset # type: ignore from ._models import ParquetFormat # type: ignore from ._models import ParquetSink # type: ignore from ._models import ParquetSource # type: ignore + from ._models import ParquetWriteSettings # type: ignore from ._models import PaypalLinkedService # type: ignore from ._models import PaypalObjectDataset # type: ignore from ._models import PaypalSource # type: ignore from ._models import PhoenixLinkedService # type: ignore from ._models import PhoenixObjectDataset # type: ignore from ._models import PhoenixSource # type: ignore + from ._models import PipelineElapsedTimeMetricPolicy # type: ignore + from ._models import PipelineFolder # type: ignore from ._models import PipelineListResponse # type: ignore + from ._models import PipelinePolicy # type: ignore from ._models import PipelineReference # type: ignore from ._models import PipelineResource # type: ignore from ._models import PipelineRun # type: ignore from ._models import PipelineRunInvokedBy # type: ignore from ._models import PipelineRunsQueryResponse # type: ignore from ._models import PolybaseSettings # type: ignore - from ._models import PostgreSQLLinkedService # type: ignore - from ._models import PostgreSQLSource # type: ignore - from ._models import PostgreSQLTableDataset # type: ignore + from ._models import PostgreSqlLinkedService # type: ignore + from ._models import PostgreSqlSource # type: ignore + from ._models import PostgreSqlTableDataset # type: ignore from ._models import PrestoLinkedService # type: ignore from ._models import PrestoObjectDataset # type: ignore from ._models import PrestoSource # type: ignore @@ -1089,18 +1139,6 @@ from ._models import RunFilterParameters # type: ignore from ._models import RunQueryFilter # type: ignore from ._models import RunQueryOrderBy # type: ignore - from ._models import SQLDWSink # type: ignore - from ._models import SQLDWSource # type: ignore - from ._models import SQLMiSink # type: ignore - from ._models import SQLMiSource # type: ignore - from ._models import SQLPartitionSettings # type: ignore - from ._models import SQLServerLinkedService # type: ignore - from ._models import SQLServerSink # type: ignore - from ._models import SQLServerSource # type: ignore - from ._models import SQLServerStoredProcedureActivity # type: ignore - from ._models import SQLServerTableDataset # type: ignore - from ._models import SQLSink # type: ignore - from ._models import SQLSource # type: ignore from ._models import SalesforceLinkedService # type: ignore from ._models import SalesforceMarketingCloudLinkedService # type: ignore from ._models import SalesforceMarketingCloudObjectDataset # type: ignore @@ -1166,6 +1204,18 @@ from ._models import SparkLinkedService # type: ignore from ._models import SparkObjectDataset # type: ignore from ._models 
import SparkSource # type: ignore + from ._models import SqlDwSink # type: ignore + from ._models import SqlDwSource # type: ignore + from ._models import SqlMiSink # type: ignore + from ._models import SqlMiSource # type: ignore + from ._models import SqlPartitionSettings # type: ignore + from ._models import SqlServerLinkedService # type: ignore + from ._models import SqlServerSink # type: ignore + from ._models import SqlServerSource # type: ignore + from ._models import SqlServerStoredProcedureActivity # type: ignore + from ._models import SqlServerTableDataset # type: ignore + from ._models import SqlSink # type: ignore + from ._models import SqlSource # type: ignore from ._models import SquareLinkedService # type: ignore from ._models import SquareObjectDataset # type: ignore from ._models import SquareSource # type: ignore @@ -1199,6 +1249,8 @@ from ._models import SybaseTableDataset # type: ignore from ._models import TabularSource # type: ignore from ._models import TabularTranslator # type: ignore + from ._models import TarGZipReadSettings # type: ignore + from ._models import TarReadSettings # type: ignore from ._models import TeradataLinkedService # type: ignore from ._models import TeradataPartitionSettings # type: ignore from ._models import TeradataSource # type: ignore @@ -1296,9 +1348,9 @@ JsonFormatFilePattern, JsonWriteFilePattern, ManagedIntegrationRuntimeNodeStatus, - MongoDBAuthenticationType, + MongoDbAuthenticationType, NetezzaPartitionOption, - ODataAADServicePrincipalCredentialType, + ODataAadServicePrincipalCredentialType, ODataAuthenticationType, OraclePartitionOption, OrcCompressionCodec, @@ -1306,13 +1358,13 @@ PhoenixAuthenticationType, PolybaseSettingsRejectType, PrestoAuthenticationType, + PublicNetworkAccess, RecurrenceFrequency, RestServiceAuthenticationType, RunQueryFilterOperand, RunQueryFilterOperator, RunQueryOrder, RunQueryOrderByField, - SQLPartitionOption, SalesforceSinkWriteBehavior, SalesforceSourceReadBehavior, SapCloudForCustomerSinkWriteBehavior, @@ -1325,6 +1377,7 @@ SparkAuthenticationType, SparkServerType, SparkThriftTransportProtocol, + SqlPartitionOption, SsisLogLocationType, SsisObjectMetadataType, SsisPackageLocationType, @@ -1393,6 +1446,12 @@ 'AzureDataLakeStoreSink', 'AzureDataLakeStoreSource', 'AzureDataLakeStoreWriteSettings', + 'AzureDatabricksDeltaLakeDataset', + 'AzureDatabricksDeltaLakeExportCommand', + 'AzureDatabricksDeltaLakeImportCommand', + 'AzureDatabricksDeltaLakeLinkedService', + 'AzureDatabricksDeltaLakeSink', + 'AzureDatabricksDeltaLakeSource', 'AzureDatabricksLinkedService', 'AzureFileStorageLinkedService', 'AzureFileStorageLocation', @@ -1402,35 +1461,35 @@ 'AzureFunctionLinkedService', 'AzureKeyVaultLinkedService', 'AzureKeyVaultSecretReference', - 'AzureMariaDBLinkedService', - 'AzureMariaDBSource', - 'AzureMariaDBTableDataset', + 'AzureMariaDbLinkedService', + 'AzureMariaDbSource', + 'AzureMariaDbTableDataset', 'AzureMlBatchExecutionActivity', 'AzureMlExecutePipelineActivity', 'AzureMlLinkedService', 'AzureMlServiceLinkedService', 'AzureMlUpdateResourceActivity', 'AzureMlWebServiceFile', - 'AzureMySQLLinkedService', - 'AzureMySQLSink', - 'AzureMySQLSource', - 'AzureMySQLTableDataset', - 'AzurePostgreSQLLinkedService', - 'AzurePostgreSQLSink', - 'AzurePostgreSQLSource', - 'AzurePostgreSQLTableDataset', + 'AzureMySqlLinkedService', + 'AzureMySqlSink', + 'AzureMySqlSource', + 'AzureMySqlTableDataset', + 'AzurePostgreSqlLinkedService', + 'AzurePostgreSqlSink', + 'AzurePostgreSqlSource', + 
'AzurePostgreSqlTableDataset', 'AzureQueueSink', - 'AzureSQLDWLinkedService', - 'AzureSQLDWTableDataset', - 'AzureSQLDatabaseLinkedService', - 'AzureSQLMiLinkedService', - 'AzureSQLMiTableDataset', - 'AzureSQLSink', - 'AzureSQLSource', - 'AzureSQLTableDataset', 'AzureSearchIndexDataset', 'AzureSearchIndexSink', 'AzureSearchLinkedService', + 'AzureSqlDatabaseLinkedService', + 'AzureSqlDwLinkedService', + 'AzureSqlDwTableDataset', + 'AzureSqlMiLinkedService', + 'AzureSqlMiTableDataset', + 'AzureSqlSink', + 'AzureSqlSource', + 'AzureSqlTableDataset', 'AzureStorageLinkedService', 'AzureTableDataset', 'AzureTableSink', @@ -1450,6 +1509,7 @@ 'ChainingTrigger', 'CloudError', 'CmdkeySetup', + 'CmkIdentityDefinition', 'CommonDataServiceForAppsEntityDataset', 'CommonDataServiceForAppsLinkedService', 'CommonDataServiceForAppsSink', @@ -1462,17 +1522,18 @@ 'ConnectionStateProperties', 'ControlActivity', 'CopyActivity', + 'CopyActivityLogSettings', 'CopySink', 'CopySource', 'CopyTranslator', - 'CosmosDBLinkedService', - 'CosmosDBMongoDBApiCollectionDataset', - 'CosmosDBMongoDBApiLinkedService', - 'CosmosDBMongoDBApiSink', - 'CosmosDBMongoDBApiSource', - 'CosmosDBSQLApiCollectionDataset', - 'CosmosDBSQLApiSink', - 'CosmosDBSQLApiSource', + 'CosmosDbLinkedService', + 'CosmosDbMongoDbApiCollectionDataset', + 'CosmosDbMongoDbApiLinkedService', + 'CosmosDbMongoDbApiSink', + 'CosmosDbMongoDbApiSource', + 'CosmosDbSqlApiCollectionDataset', + 'CosmosDbSqlApiSink', + 'CosmosDbSqlApiSource', 'CouchbaseLinkedService', 'CouchbaseSource', 'CouchbaseTableDataset', @@ -1484,14 +1545,14 @@ 'CustomActivityReferenceObject', 'CustomDataSourceLinkedService', 'CustomDataset', + 'CustomEventsTrigger', 'CustomSetupBase', - 'DWCopyCommandDefaultValue', - 'DWCopyCommandSettings', 'DataFlow', 'DataFlowDebugCommandPayload', 'DataFlowDebugCommandRequest', 'DataFlowDebugCommandResponse', 'DataFlowDebugPackage', + 'DataFlowDebugPackageDebugSettings', 'DataFlowDebugResource', 'DataFlowDebugSessionInfo', 'DataFlowFolder', @@ -1520,6 +1581,8 @@ 'DatasetResource', 'DatasetSchemaDataElement', 'DatasetStorageFormat', + 'DatasetTarCompression', + 'DatasetTarGZipCompression', 'DatasetZipDeflateCompression', 'Db2LinkedService', 'Db2Source', @@ -1533,12 +1596,14 @@ 'DelimitedTextWriteSettings', 'DependencyReference', 'DistcpSettings', - 'DocumentDBCollectionDataset', - 'DocumentDBCollectionSink', - 'DocumentDBCollectionSource', + 'DocumentDbCollectionDataset', + 'DocumentDbCollectionSink', + 'DocumentDbCollectionSource', 'DrillLinkedService', 'DrillSource', 'DrillTableDataset', + 'DwCopyCommandDefaultValue', + 'DwCopyCommandSettings', 'DynamicsAxLinkedService', 'DynamicsAxResourceDataset', 'DynamicsAxSource', @@ -1553,6 +1618,7 @@ 'EloquaLinkedService', 'EloquaObjectDataset', 'EloquaSource', + 'EncryptionConfiguration', 'EntityReference', 'EnvironmentVariableSetup', 'ExcelDataset', @@ -1682,6 +1748,8 @@ 'LinkedServiceListResponse', 'LinkedServiceReference', 'LinkedServiceResource', + 'LogLocationSettings', + 'LogSettings', 'LogStorageSettings', 'LookupActivity', 'MagentoLinkedService', @@ -1697,11 +1765,12 @@ 'ManagedPrivateEndpointResource', 'ManagedVirtualNetwork', 'ManagedVirtualNetworkListResponse', + 'ManagedVirtualNetworkReference', 'ManagedVirtualNetworkResource', 'MappingDataFlow', - 'MariaDBLinkedService', - 'MariaDBSource', - 'MariaDBTableDataset', + 'MariaDbLinkedService', + 'MariaDbSource', + 'MariaDbTableDataset', 'MarketoLinkedService', 'MarketoObjectDataset', 'MarketoSource', @@ -1709,17 +1778,20 @@ 'MicrosoftAccessSink', 
'MicrosoftAccessSource', 'MicrosoftAccessTableDataset', - 'MongoDBCollectionDataset', - 'MongoDBCursorMethodsProperties', - 'MongoDBLinkedService', - 'MongoDBSource', - 'MongoDBV2CollectionDataset', - 'MongoDBV2LinkedService', - 'MongoDBV2Source', + 'MongoDbAtlasCollectionDataset', + 'MongoDbAtlasLinkedService', + 'MongoDbAtlasSource', + 'MongoDbCollectionDataset', + 'MongoDbCursorMethodsProperties', + 'MongoDbLinkedService', + 'MongoDbSource', + 'MongoDbV2CollectionDataset', + 'MongoDbV2LinkedService', + 'MongoDbV2Source', 'MultiplePipelineTrigger', - 'MySQLLinkedService', - 'MySQLSource', - 'MySQLTableDataset', + 'MySqlLinkedService', + 'MySqlSource', + 'MySqlTableDataset', 'NetezzaLinkedService', 'NetezzaPartitionSettings', 'NetezzaSource', @@ -1754,28 +1826,33 @@ 'OrcFormat', 'OrcSink', 'OrcSource', + 'OrcWriteSettings', 'PackageStore', 'ParameterSpecification', 'ParquetDataset', 'ParquetFormat', 'ParquetSink', 'ParquetSource', + 'ParquetWriteSettings', 'PaypalLinkedService', 'PaypalObjectDataset', 'PaypalSource', 'PhoenixLinkedService', 'PhoenixObjectDataset', 'PhoenixSource', + 'PipelineElapsedTimeMetricPolicy', + 'PipelineFolder', 'PipelineListResponse', + 'PipelinePolicy', 'PipelineReference', 'PipelineResource', 'PipelineRun', 'PipelineRunInvokedBy', 'PipelineRunsQueryResponse', 'PolybaseSettings', - 'PostgreSQLLinkedService', - 'PostgreSQLSource', - 'PostgreSQLTableDataset', + 'PostgreSqlLinkedService', + 'PostgreSqlSource', + 'PostgreSqlTableDataset', 'PrestoLinkedService', 'PrestoObjectDataset', 'PrestoSource', @@ -1802,18 +1879,6 @@ 'RunFilterParameters', 'RunQueryFilter', 'RunQueryOrderBy', - 'SQLDWSink', - 'SQLDWSource', - 'SQLMiSink', - 'SQLMiSource', - 'SQLPartitionSettings', - 'SQLServerLinkedService', - 'SQLServerSink', - 'SQLServerSource', - 'SQLServerStoredProcedureActivity', - 'SQLServerTableDataset', - 'SQLSink', - 'SQLSource', 'SalesforceLinkedService', 'SalesforceMarketingCloudLinkedService', 'SalesforceMarketingCloudObjectDataset', @@ -1879,6 +1944,18 @@ 'SparkLinkedService', 'SparkObjectDataset', 'SparkSource', + 'SqlDwSink', + 'SqlDwSource', + 'SqlMiSink', + 'SqlMiSource', + 'SqlPartitionSettings', + 'SqlServerLinkedService', + 'SqlServerSink', + 'SqlServerSource', + 'SqlServerStoredProcedureActivity', + 'SqlServerTableDataset', + 'SqlSink', + 'SqlSource', 'SquareLinkedService', 'SquareObjectDataset', 'SquareSource', @@ -1912,6 +1989,8 @@ 'SybaseTableDataset', 'TabularSource', 'TabularTranslator', + 'TarGZipReadSettings', + 'TarReadSettings', 'TeradataLinkedService', 'TeradataPartitionSettings', 'TeradataSource', @@ -2007,9 +2086,9 @@ 'JsonFormatFilePattern', 'JsonWriteFilePattern', 'ManagedIntegrationRuntimeNodeStatus', - 'MongoDBAuthenticationType', + 'MongoDbAuthenticationType', 'NetezzaPartitionOption', - 'ODataAADServicePrincipalCredentialType', + 'ODataAadServicePrincipalCredentialType', 'ODataAuthenticationType', 'OraclePartitionOption', 'OrcCompressionCodec', @@ -2017,13 +2096,13 @@ 'PhoenixAuthenticationType', 'PolybaseSettingsRejectType', 'PrestoAuthenticationType', + 'PublicNetworkAccess', 'RecurrenceFrequency', 'RestServiceAuthenticationType', 'RunQueryFilterOperand', 'RunQueryFilterOperator', 'RunQueryOrder', 'RunQueryOrderByField', - 'SQLPartitionOption', 'SalesforceSinkWriteBehavior', 'SalesforceSourceReadBehavior', 'SapCloudForCustomerSinkWriteBehavior', @@ -2036,6 +2115,7 @@ 'SparkAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', + 'SqlPartitionOption', 'SsisLogLocationType', 'SsisObjectMetadataType', 
'SsisPackageLocationType', diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py index d663167e0f4..f2857552ae9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_data_factory_management_client_enums.py @@ -86,6 +86,8 @@ class CompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): DEFLATE = "deflate" ZIP_DEFLATE = "zipDeflate" LZ4 = "lz4" + TAR = "tar" + TAR_G_ZIP = "tarGZip" class CopyBehaviorType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """All available types of copy behavior. @@ -183,7 +185,7 @@ class DynamicsServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnum SERVICE_PRINCIPAL_CERT = "ServicePrincipalCert" class DynamicsSinkWriteBehavior(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The write behavior for the operation. + """Defines values for DynamicsSinkWriteBehavior. """ UPSERT = "Upsert" @@ -406,7 +408,7 @@ class ManagedIntegrationRuntimeNodeStatus(with_metaclass(_CaseInsensitiveEnumMet RECYCLING = "Recycling" UNAVAILABLE = "Unavailable" -class MongoDBAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class MongoDbAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The authentication type to be used to connect to the MongoDB database. """ @@ -421,7 +423,7 @@ class NetezzaPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum) DATA_SLICE = "DataSlice" DYNAMIC_RANGE = "DynamicRange" -class ODataAADServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class ODataAadServicePrincipalCredentialType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Specify the credential type (key or cert) is used for service principal. """ @@ -451,6 +453,7 @@ class OrcCompressionCodec(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): NONE = "none" ZLIB = "zlib" SNAPPY = "snappy" + LZO = "lzo" class ParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Parameter type. @@ -486,6 +489,13 @@ class PrestoAuthenticationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enu ANONYMOUS = "Anonymous" LDAP = "LDAP" +class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """Whether or not public network access is allowed for the data factory. + """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + class RecurrenceFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Enumerates possible frequency option for the schedule trigger. """ @@ -658,7 +668,7 @@ class SparkThriftTransportProtocol(with_metaclass(_CaseInsensitiveEnumMeta, str, SASL = "SASL" HTTP = "HTTP " -class SQLPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): +class SqlPartitionOption(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """The partition mechanism that will be used for Sql read in parallel. """ @@ -746,6 +756,7 @@ class TumblingWindowFrequency(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum MINUTE = "Minute" HOUR = "Hour" + MONTH = "Month" class VariableType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): """Variable type. 
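Editorial aside (illustrative, not part of the generated diff): this regeneration renames the all-caps acronym classes (SQL, DB, DW, MWS) to Pascal-cased names (Sql, Db, Dw, Mws) and exports the new PublicNetworkAccess enum, so downstream code imports the new identifiers from the vendored SDK path shown in the diff file headers. A minimal sketch, assuming only names present in the import and __all__ lists above:

    # Illustrative only -- shows the renamed model/enum identifiers re-exported
    # by azext_datafactory.vendored_sdks.datafactory.models after regeneration.
    from azext_datafactory.vendored_sdks.datafactory.models import (
        CosmosDbLinkedService,   # previously CosmosDBLinkedService
        SqlServerLinkedService,  # previously SQLServerLinkedService
        PublicNetworkAccess,     # enum added in this regeneration
    )

    # Members of the str-based enums still compare equal to their REST wire values.
    assert PublicNetworkAccess.ENABLED == "Enabled"
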
diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py index ec6d27ddb59..70bcf0b20f2 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py @@ -342,7 +342,7 @@ class LinkedService(msrest.serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySQLLinkedService, AzurePostgreSQLLinkedService, AzureSearchLinkedService, AzureSQLDWLinkedService, AzureSQLDatabaseLinkedService, AzureSQLMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBV2LinkedService, MySQLLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSQLLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SQLServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. 
+ sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDbLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDwLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDbLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
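Editorial aside (hypothetical usage, not part of the diff): the REST 'type' discriminator values that LinkedService dispatches on ('SqlServer', 'CosmosDb', 'MongoDbAtlas', ...) are unchanged by this regeneration; only the Python class names they resolve to are recased, as the updated _subtype_map in the next hunk shows. Assuming msrest's standard Model.deserialize classmethod, a payload keyed by the existing wire value still lands on the renamed subclass; the payload contents below are made up for illustration:

    # Hypothetical sketch -- payload values are illustrative only.
    from azext_datafactory.vendored_sdks.datafactory.models import (
        LinkedService,
        SqlServerLinkedService,
    )

    payload = {
        "type": "SqlServer",  # wire discriminator keeps its existing casing
        "typeProperties": {"connectionString": "Server=tcp:example,1433;Database=exampleDb"},
    }

    # msrest classifies the dict through LinkedService._subtype_map and
    # instantiates the matching subclass.
    svc = LinkedService.deserialize(payload)
    assert isinstance(svc, SqlServerLinkedService)
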
@@ -375,7 +375,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySQLLinkedService', 'AzurePostgreSql': 'AzurePostgreSQLLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSQLDWLinkedService', 'AzureSqlDatabase': 'AzureSQLDatabaseLinkedService', 'AzureSqlMI': 'AzureSQLMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySQLLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSQLLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 
'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SQLServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDbLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDwLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDbLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 
'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -489,7 +489,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureMariaDBTableDataset, AzureMySQLTableDataset, AzurePostgreSQLTableDataset, AzureSearchIndexDataset, AzureSQLDWTableDataset, AzureSQLMiTableDataset, AzureSQLTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSQLApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySQLTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSQLTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SQLServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDbTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDwTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDbTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
@@ -535,7 +535,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySQLTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSQLTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSQLDWTableDataset', 'AzureSqlMITable': 'AzureSQLMiTableDataset', 'AzureSqlTable': 'AzureSQLTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSQLApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySQLTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSQLTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 
'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SQLServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDbTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDwTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDbTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -616,7 +616,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSQLApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. 
@@ -649,7 +649,7 @@ class CopySource(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSQLApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -668,7 +668,7 @@ class TabularSource(CopySource): """Copy activity sources of tabular type. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySQLSource, AzurePostgreSQLSource, AzureSQLSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySQLSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSQLSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SQLDWSource, SQLMiSource, SQLServerSource, SQLSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDbSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDbSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDwSource, SqlMiSource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. All required parameters must be populated in order to send to Azure. 
@@ -709,7 +709,7 @@ class TabularSource(CopySource): } _subtype_map = { - 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySQLSource', 'AzurePostgreSqlSource': 'AzurePostgreSQLSource', 'AzureSqlSource': 'AzureSQLSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySQLSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSQLSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SQLDWSource', 'SqlMISource': 'SQLMiSource', 'SqlServerSource': 'SQLServerSource', 'SqlSource': 'SQLSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} + 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDbSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDbSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 
'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDwSource', 'SqlMISource': 'SqlMiSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} } def __init__( @@ -1088,6 +1088,9 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). :type access_key_id: object @@ -1098,6 +1101,8 @@ class AmazonS3LinkedService(LinkedService): an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security credential. + :type session_token: ~data_factory_management_client.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -1115,9 +1120,11 @@ class AmazonS3LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1127,9 +1134,11 @@ def __init__( ): super(AmazonS3LinkedService, self).__init__(**kwargs) self.type = 'AmazonS3' # type: str + self.authentication_type = kwargs.get('authentication_type', None) self.access_key_id = kwargs.get('access_key_id', None) self.secret_access_key = kwargs.get('secret_access_key', None) self.service_url = kwargs.get('service_url', None) + self.session_token = kwargs.get('session_token', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) @@ -1555,7 +1564,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySQLSink, AzurePostgreSQLSink, AzureQueueSink, AzureSearchIndexSink, AzureSQLSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSQLApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SQLDWSink, SQLMiSink, SQLServerSink, SQLSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. @@ -1596,7 +1605,7 @@ class CopySink(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySQLSink', 'AzurePostgreSqlSink': 'AzurePostgreSQLSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSQLSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSQLApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SQLDWSink', 'SqlMISink': 'SQLMiSink', 'SqlServerSink': 'SQLServerSink', 'SqlSink': 'SQLSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 
'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -1724,7 +1733,7 @@ class FormatWriteSettings(msrest.serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. All required parameters must be populated in order to send to Azure. @@ -1745,7 +1754,7 @@ class FormatWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__( @@ -1771,6 +1780,13 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -1782,6 +1798,8 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -1792,6 +1810,8 @@ def __init__( self.type = 'AvroWriteSettings' # type: str self.record_name = kwargs.get('record_name', None) self.record_namespace = kwargs.get('record_namespace', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class CustomSetupBase(msrest.serialization.Model): @@ -2742,253 +2762,223 @@ def __init__( self.block_size_in_mb = kwargs.get('block_size_in_mb', None) -class AzureDatabricksLinkedService(LinkedService): - """Azure Databricks linked service. +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param type: Required. 
Type of linked service.Constant filled by server. + :param type: Required. Type of dataset.Constant filled by server. :type type: str - :param connect_via: The integration runtime reference. - :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference - :param description: Linked service description. + :param description: Dataset description. :type description: str - :param parameters: Parameters for linked service. + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] - :param annotations: List of tags that can be used for describing the linked service. + :param annotations: List of tags that can be used for describing the Dataset. :type annotations: list[object] - :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks - deployment. Type: string (or Expression with resultType string). - :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to - https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression - with resultType string). - :type access_token: ~data_factory_management_client.models.SecretBase - :param existing_cluster_id: The id of an existing interactive cluster that will be used for all - runs of this activity. Type: string (or Expression with resultType string). - :type existing_cluster_id: object - :param instance_pool_id: The id of an existing instance pool that will be used for all runs of - this activity. Type: string (or Expression with resultType string). - :type instance_pool_id: object - :param new_cluster_version: If not using an existing interactive cluster, this specifies the - Spark version of a new job cluster or instance pool nodes created for each run of this - activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param table: The name of delta table. Type: string (or Expression with resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or Expression with resultType string). - :type new_cluster_version: object - :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies - the number of worker nodes to use for the new job cluster or instance pool. For new job - clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- - scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can - only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is - specified. Type: string (or Expression with resultType string). - :type new_cluster_num_of_worker: object - :param new_cluster_node_type: The node type of the new job cluster. 
This property is required - if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is - specified, this property is ignored. Type: string (or Expression with resultType string). - :type new_cluster_node_type: object - :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value - pairs. - :type new_cluster_spark_conf: dict[str, object] - :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment - variables key-value pairs. - :type new_cluster_spark_env_vars: dict[str, object] - :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored - in instance pool configurations. - :type new_cluster_custom_tags: dict[str, object] - :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and - event logs. Type: string (or Expression with resultType string). - :type new_cluster_log_destination: object - :param new_cluster_driver_node_type: The driver node type for the new job cluster. This - property is ignored in instance pool configurations. Type: string (or Expression with - resultType string). - :type new_cluster_driver_node_type: object - :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: - array of strings (or Expression with resultType array of strings). - :type new_cluster_init_scripts: object - :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This - property is now ignored, and takes the default elastic disk behavior in Databricks (elastic - disks are always enabled). Type: boolean (or Expression with resultType boolean). - :type new_cluster_enable_elastic_disk: object - :param encrypted_credential: The encrypted credential used for authentication. Credentials are - encrypted using the integration runtime credential manager. Type: string (or Expression with - resultType string). 
- :type encrypted_credential: object + :type database: object """ _validation = { 'type': {'required': True}, - 'domain': {'required': True}, - 'access_token': {'required': True}, + 'linked_service_name': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, - 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, - 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, - 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, - 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, - 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, - 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, - 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, - 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, - 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, - 'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'}, - 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, - 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, - 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, - 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureDatabricksLinkedService, self).__init__(**kwargs) - self.type = 'AzureDatabricks' # type: str - self.domain = kwargs['domain'] - self.access_token = kwargs['access_token'] - self.existing_cluster_id = kwargs.get('existing_cluster_id', None) - self.instance_pool_id = kwargs.get('instance_pool_id', None) - self.new_cluster_version = kwargs.get('new_cluster_version', None) - self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) - self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) - self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) - self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) - self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) - self.new_cluster_log_destination = kwargs.get('new_cluster_log_destination', None) - self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) - self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) - self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) - 
self.encrypted_credential = kwargs.get('encrypted_credential', None) + super(AzureDatabricksDeltaLakeDataset, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + self.table = kwargs.get('table', None) + self.database = kwargs.get('database', None) -class ExecutionActivity(Activity): - """Base class for all execution activities. +class ExportSettings(msrest.serialization.Model): + """Export command settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SQLServerStoredProcedureActivity, WebActivity. + sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The export setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~data_factory_management_client.models.ActivityPolicy """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SQLServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} } def __init__( self, **kwargs ): - super(ExecutionActivity, self).__init__(**kwargs) - self.type = 'Execution' # type: str - self.linked_service_name = kwargs.get('linked_service_name', None) - self.policy = kwargs.get('policy', None) + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'ExportSettings' # type: str -class AzureDataExplorerCommandActivity(ExecutionActivity): - """Azure Data Explorer command activity. +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. All required parameters must be populated in order to send to Azure. :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param name: Required. Activity name. - :type name: str - :param type: Required. Type of activity.Constant filled by server. + :param type: Required. The export setting type.Constant filled by server. :type type: str - :param description: Activity description. - :type description: str - :param depends_on: Activity depends on condition. - :type depends_on: list[~data_factory_management_client.models.ActivityDependency] - :param user_properties: Activity user properties. - :type user_properties: list[~data_factory_management_client.models.UserProperty] - :param linked_service_name: Linked service reference. - :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference - :param policy: Activity policy. 
- :type policy: ~data_factory_management_client.models.ActivityPolicy - :param command: Required. A control command, according to the Azure Data Explorer command - syntax. Type: string (or Expression with resultType string). - :type command: object - :param command_timeout: Control command timeout. Type: string (or Expression with resultType - string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). - :type command_timeout: object + :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object """ _validation = { - 'name': {'required': True}, 'type': {'required': True}, - 'command': {'required': True}, } _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, - 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, - 'command': {'key': 'typeProperties.command', 'type': 'object'}, - 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) - self.type = 'AzureDataExplorerCommand' # type: str - self.command = kwargs['command'] - self.command_timeout = kwargs.get('command_timeout', None) + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) -class AzureDataExplorerLinkedService(LinkedService): - """Azure Data Explorer (Kusto) linked service. +class ImportSettings(msrest.serialization.Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__( + self, + **kwargs + ): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'ImportSettings' # type: str + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. All required parameters must be populated in order to send to Azure. @@ -3005,31 +2995,25 @@ class AzureDataExplorerLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] - :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL - will be in the format https://:code:``.:code:``.kusto.windows.net. - Type: string (or Expression with resultType string). - :type endpoint: object - :param service_principal_id: Required. The ID of the service principal used to authenticate - against Azure Data Explorer. Type: string (or Expression with resultType string). - :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal used to authenticate - against Kusto. - :type service_principal_key: ~data_factory_management_client.models.SecretBase - :param database: Required. Database name for connection. Type: string (or Expression with + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. 
Type: string, SecureString or + AzureKeyVaultSecretReference. + :type access_token: ~data_factory_management_client.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + this job. Type: string (or Expression with resultType string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). - :type database: object - :param tenant: Required. The name or ID of the tenant to which the service principal belongs. - Type: string (or Expression with resultType string). - :type tenant: object + :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, - 'database': {'required': True}, - 'tenant': {'required': True}, + 'domain': {'required': True}, } _attribute_map = { @@ -3039,28 +3023,467 @@ class AzureDataExplorerLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, - 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, - 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, - 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, - 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } def __init__( self, **kwargs ): - super(AzureDataExplorerLinkedService, self).__init__(**kwargs) - self.type = 'AzureDataExplorer' # type: str - self.endpoint = kwargs['endpoint'] - self.service_principal_id = kwargs['service_principal_id'] - self.service_principal_key = kwargs['service_principal_key'] - self.database = kwargs['database'] - self.tenant = kwargs['tenant'] + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLake' # type: str + self.domain = kwargs['domain'] + self.access_token = kwargs.get('access_token', None) + self.cluster_id = kwargs.get('cluster_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureDataExplorerSink(CopySink): - """A copy activity Azure Data Explorer sink. +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. + :type import_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeSink, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeSink' # type: str + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.import_settings = kwargs.get('import_settings', None) + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksDeltaLakeSource, self).__init__(**kwargs) + self.type = 'AzureDatabricksDeltaLakeSource' # type: str + self.query = kwargs.get('query', None) + self.export_settings = kwargs.get('export_settings', None) + + +class AzureDatabricksLinkedService(LinkedService): + """Azure Databricks linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression + with resultType string). + :type access_token: ~data_factory_management_client.models.SecretBase + :param authentication: Required to specify MSI, if using Workspace resource id for databricks + REST API. Type: string (or Expression with resultType string). + :type authentication: object + :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or + Expression with resultType string). + :type workspace_resource_id: object + :param existing_cluster_id: The id of an existing interactive cluster that will be used for all + runs of this activity. Type: string (or Expression with resultType string). + :type existing_cluster_id: object + :param instance_pool_id: The id of an existing instance pool that will be used for all runs of + this activity. Type: string (or Expression with resultType string). + :type instance_pool_id: object + :param new_cluster_version: If not using an existing interactive cluster, this specifies the + Spark version of a new job cluster or instance pool nodes created for each run of this + activity. Required if instancePoolId is specified. Type: string (or Expression with resultType + string). 
+ :type new_cluster_version: object + :param new_cluster_num_of_worker: If not using an existing interactive cluster, this specifies + the number of worker nodes to use for the new job cluster or instance pool. For new job + clusters, this a string-formatted Int32, like '1' means numOfWorker is 1 or '1:10' means auto- + scale from 1 (min) to 10 (max). For instance pools, this is a string-formatted Int32, and can + only specify a fixed number of worker nodes, such as '2'. Required if newClusterVersion is + specified. Type: string (or Expression with resultType string). + :type new_cluster_num_of_worker: object + :param new_cluster_node_type: The node type of the new job cluster. This property is required + if newClusterVersion is specified and instancePoolId is not specified. If instancePoolId is + specified, this property is ignored. Type: string (or Expression with resultType string). + :type new_cluster_node_type: object + :param new_cluster_spark_conf: A set of optional, user-specified Spark configuration key-value + pairs. + :type new_cluster_spark_conf: dict[str, object] + :param new_cluster_spark_env_vars: A set of optional, user-specified Spark environment + variables key-value pairs. + :type new_cluster_spark_env_vars: dict[str, object] + :param new_cluster_custom_tags: Additional tags for cluster resources. This property is ignored + in instance pool configurations. + :type new_cluster_custom_tags: dict[str, object] + :param new_cluster_log_destination: Specify a location to deliver Spark driver, worker, and + event logs. Type: string (or Expression with resultType string). + :type new_cluster_log_destination: object + :param new_cluster_driver_node_type: The driver node type for the new job cluster. This + property is ignored in instance pool configurations. Type: string (or Expression with + resultType string). + :type new_cluster_driver_node_type: object + :param new_cluster_init_scripts: User-defined initialization scripts for the new cluster. Type: + array of strings (or Expression with resultType array of strings). + :type new_cluster_init_scripts: object + :param new_cluster_enable_elastic_disk: Enable the elastic disk on the new cluster. This + property is now ignored, and takes the default elastic disk behavior in Databricks (elastic + disks are always enabled). Type: boolean (or Expression with resultType boolean). + :type new_cluster_enable_elastic_disk: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). + :type encrypted_credential: object + :param policy_id: The policy id for limiting the ability to configure clusters based on a user + defined set of rules. Type: string (or Expression with resultType string). 
+ :type policy_id: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, + 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, + 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, + 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, + 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, + 'new_cluster_num_of_worker': {'key': 'typeProperties.newClusterNumOfWorker', 'type': 'object'}, + 'new_cluster_node_type': {'key': 'typeProperties.newClusterNodeType', 'type': 'object'}, + 'new_cluster_spark_conf': {'key': 'typeProperties.newClusterSparkConf', 'type': '{object}'}, + 'new_cluster_spark_env_vars': {'key': 'typeProperties.newClusterSparkEnvVars', 'type': '{object}'}, + 'new_cluster_custom_tags': {'key': 'typeProperties.newClusterCustomTags', 'type': '{object}'}, + 'new_cluster_log_destination': {'key': 'typeProperties.newClusterLogDestination', 'type': 'object'}, + 'new_cluster_driver_node_type': {'key': 'typeProperties.newClusterDriverNodeType', 'type': 'object'}, + 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, + 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDatabricksLinkedService, self).__init__(**kwargs) + self.type = 'AzureDatabricks' # type: str + self.domain = kwargs['domain'] + self.access_token = kwargs.get('access_token', None) + self.authentication = kwargs.get('authentication', None) + self.workspace_resource_id = kwargs.get('workspace_resource_id', None) + self.existing_cluster_id = kwargs.get('existing_cluster_id', None) + self.instance_pool_id = kwargs.get('instance_pool_id', None) + self.new_cluster_version = kwargs.get('new_cluster_version', None) + self.new_cluster_num_of_worker = kwargs.get('new_cluster_num_of_worker', None) + self.new_cluster_node_type = kwargs.get('new_cluster_node_type', None) + self.new_cluster_spark_conf = kwargs.get('new_cluster_spark_conf', None) + self.new_cluster_spark_env_vars = kwargs.get('new_cluster_spark_env_vars', None) + self.new_cluster_custom_tags = kwargs.get('new_cluster_custom_tags', None) + self.new_cluster_log_destination = kwargs.get('new_cluster_log_destination', None) + self.new_cluster_driver_node_type = kwargs.get('new_cluster_driver_node_type', None) + self.new_cluster_init_scripts = kwargs.get('new_cluster_init_scripts', None) + self.new_cluster_enable_elastic_disk = kwargs.get('new_cluster_enable_elastic_disk', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.policy_id = 
kwargs.get('policy_id', None) + + +class ExecutionActivity(Activity): + """Base class for all execution activities. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~data_factory_management_client.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~data_factory_management_client.models.ActivityPolicy + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + } + + _subtype_map = { + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + } + + def __init__( + self, + **kwargs + ): + super(ExecutionActivity, 
self).__init__(**kwargs) + self.type = 'Execution' # type: str + self.linked_service_name = kwargs.get('linked_service_name', None) + self.policy = kwargs.get('policy', None) + + +class AzureDataExplorerCommandActivity(ExecutionActivity): + """Azure Data Explorer command activity. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param name: Required. Activity name. + :type name: str + :param type: Required. Type of activity.Constant filled by server. + :type type: str + :param description: Activity description. + :type description: str + :param depends_on: Activity depends on condition. + :type depends_on: list[~data_factory_management_client.models.ActivityDependency] + :param user_properties: Activity user properties. + :type user_properties: list[~data_factory_management_client.models.UserProperty] + :param linked_service_name: Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param policy: Activity policy. + :type policy: ~data_factory_management_client.models.ActivityPolicy + :param command: Required. A control command, according to the Azure Data Explorer command + syntax. Type: string (or Expression with resultType string). + :type command: object + :param command_timeout: Control command timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..). + :type command_timeout: object + """ + + _validation = { + 'name': {'required': True}, + 'type': {'required': True}, + 'command': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, + 'command': {'key': 'typeProperties.command', 'type': 'object'}, + 'command_timeout': {'key': 'typeProperties.commandTimeout', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDataExplorerCommandActivity, self).__init__(**kwargs) + self.type = 'AzureDataExplorerCommand' # type: str + self.command = kwargs['command'] + self.command_timeout = kwargs.get('command_timeout', None) + + +class AzureDataExplorerLinkedService(LinkedService): + """Azure Data Explorer (Kusto) linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. 
+ :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param endpoint: Required. The endpoint of Azure Data Explorer (the engine's endpoint). URL + will be in the format https://:code:``.:code:``.kusto.windows.net. + Type: string (or Expression with resultType string). + :type endpoint: object + :param service_principal_id: Required. The ID of the service principal used to authenticate + against Azure Data Explorer. Type: string (or Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: Required. The key of the service principal used to authenticate + against Kusto. + :type service_principal_key: ~data_factory_management_client.models.SecretBase + :param database: Required. Database name for connection. Type: string (or Expression with + resultType string). + :type database: object + :param tenant: Required. The name or ID of the tenant to which the service principal belongs. + Type: string (or Expression with resultType string). + :type tenant: object + """ + + _validation = { + 'type': {'required': True}, + 'endpoint': {'required': True}, + 'service_principal_id': {'required': True}, + 'service_principal_key': {'required': True}, + 'database': {'required': True}, + 'tenant': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'endpoint': {'key': 'typeProperties.endpoint', 'type': 'object'}, + 'service_principal_id': {'key': 'typeProperties.servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(AzureDataExplorerLinkedService, self).__init__(**kwargs) + self.type = 'AzureDataExplorer' # type: str + self.endpoint = kwargs['endpoint'] + self.service_principal_id = kwargs['service_principal_id'] + self.service_principal_key = kwargs['service_principal_key'] + self.database = kwargs['database'] + self.tenant = kwargs['tenant'] + + +class AzureDataExplorerSink(CopySink): + """A copy activity Azure Data Explorer sink. All required parameters must be populated in order to send to Azure. @@ -4254,7 +4677,7 @@ def __init__( self.secret_version = kwargs.get('secret_version', None) -class AzureMariaDBLinkedService(LinkedService): +class AzureMariaDbLinkedService(LinkedService): """Azure Database for MariaDB linked service. All required parameters must be populated in order to send to Azure. 
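A minimal usage sketch of the new Azure Data Explorer models above, assuming the extension's vendored models module path; the endpoint, IDs and sample control command are illustrative only:

from azext_datafactory.vendored_sdks.datafactory.models import (
    AzureDataExplorerCommandActivity,
    AzureDataExplorerLinkedService,
    SecureString,  # a SecretBase implementation from the same models module
)

# All five typeProperties of the linked service are required; each is read via
# kwargs['...'] in __init__, so omitting one fails at construction time.
kusto_ls = AzureDataExplorerLinkedService(
    endpoint='https://mycluster.westeurope.kusto.windows.net',  # hypothetical cluster URL
    service_principal_id='00000000-0000-0000-0000-000000000000',
    service_principal_key=SecureString(value='<placeholder>'),
    database='mydb',
    tenant='contoso.onmicrosoft.com',
)

# For the command activity only 'name' and 'command' are required;
# command_timeout follows the d.hh:mm:ss timespan pattern documented above.
purge = AzureDataExplorerCommandActivity(
    name='PurgeOldExtents',
    command='.show extents',
    command_timeout='0.01:00:00',
)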
@@ -4303,14 +4726,14 @@ def __init__( self, **kwargs ): - super(AzureMariaDBLinkedService, self).__init__(**kwargs) + super(AzureMariaDbLinkedService, self).__init__(**kwargs) self.type = 'AzureMariaDB' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureMariaDBSource(TabularSource): +class AzureMariaDbSource(TabularSource): """A copy activity Azure MariaDB source. All required parameters must be populated in order to send to Azure. @@ -4359,12 +4782,12 @@ def __init__( self, **kwargs ): - super(AzureMariaDBSource, self).__init__(**kwargs) + super(AzureMariaDbSource, self).__init__(**kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = kwargs.get('query', None) -class AzureMariaDBTableDataset(Dataset): +class AzureMariaDbTableDataset(Dataset): """Azure Database for MariaDB dataset. All required parameters must be populated in order to send to Azure. @@ -4417,7 +4840,7 @@ def __init__( self, **kwargs ): - super(AzureMariaDBTableDataset, self).__init__(**kwargs) + super(AzureMariaDbTableDataset, self).__init__(**kwargs) self.type = 'AzureMariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -4515,9 +4938,15 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~data_factory_management_client.models.ActivityPolicy - :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or - Expression with resultType string). + :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with + resultType string). :type ml_pipeline_id: object + :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string + (or Expression with resultType string). + :type ml_pipeline_endpoint_id: object + :param version: Version of the published Azure ML pipeline endpoint. Type: string (or + Expression with resultType string). + :type version: object :param experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). @@ -4527,6 +4956,10 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). :type ml_pipeline_parameters: object + :param data_path_assignments: Dictionary used for changing data path assignments without + retraining. Values will be passed in the dataPathAssignments property of the published pipeline + execution request. Type: object with key value pairs (or Expression with resultType object). + :type data_path_assignments: object :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). 
@@ -4541,7 +4974,6 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'ml_pipeline_id': {'required': True}, } _attribute_map = { @@ -4554,8 +4986,11 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, + 'ml_pipeline_endpoint_id': {'key': 'typeProperties.mlPipelineEndpointId', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, + 'data_path_assignments': {'key': 'typeProperties.dataPathAssignments', 'type': 'object'}, 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } @@ -4566,9 +5001,12 @@ def __init__( ): super(AzureMlExecutePipelineActivity, self).__init__(**kwargs) self.type = 'AzureMLExecutePipeline' # type: str - self.ml_pipeline_id = kwargs['ml_pipeline_id'] + self.ml_pipeline_id = kwargs.get('ml_pipeline_id', None) + self.ml_pipeline_endpoint_id = kwargs.get('ml_pipeline_endpoint_id', None) + self.version = kwargs.get('version', None) self.experiment_name = kwargs.get('experiment_name', None) self.ml_pipeline_parameters = kwargs.get('ml_pipeline_parameters', None) + self.data_path_assignments = kwargs.get('data_path_assignments', None) self.ml_parent_run_id = kwargs.get('ml_parent_run_id', None) self.continue_on_step_failure = kwargs.get('continue_on_step_failure', None) @@ -4833,7 +5271,7 @@ def __init__( self.linked_service_name = kwargs['linked_service_name'] -class AzureMySQLLinkedService(LinkedService): +class AzureMySqlLinkedService(LinkedService): """Azure MySQL database linked service. All required parameters must be populated in order to send to Azure. @@ -4883,14 +5321,14 @@ def __init__( self, **kwargs ): - super(AzureMySQLLinkedService, self).__init__(**kwargs) + super(AzureMySqlLinkedService, self).__init__(**kwargs) self.type = 'AzureMySql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureMySQLSink(CopySink): +class AzureMySqlSink(CopySink): """A copy activity Azure MySql sink. All required parameters must be populated in order to send to Azure. @@ -4939,12 +5377,12 @@ def __init__( self, **kwargs ): - super(AzureMySQLSink, self).__init__(**kwargs) + super(AzureMySqlSink, self).__init__(**kwargs) self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) -class AzureMySQLSource(TabularSource): +class AzureMySqlSource(TabularSource): """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. @@ -4992,12 +5430,12 @@ def __init__( self, **kwargs ): - super(AzureMySQLSource, self).__init__(**kwargs) + super(AzureMySqlSource, self).__init__(**kwargs) self.type = 'AzureMySqlSource' # type: str self.query = kwargs.get('query', None) -class AzureMySQLTableDataset(Dataset): +class AzureMySqlTableDataset(Dataset): """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. 
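With this change ml_pipeline_id becomes optional and the activity can instead target a published Azure ML pipeline endpoint. A hedged sketch of the new shape (the endpoint ID, version and data path assignment values below are made up):

from azext_datafactory.vendored_sdks.datafactory.models import AzureMlExecutePipelineActivity

run_training = AzureMlExecutePipelineActivity(
    name='RunTrainingPipeline',
    ml_pipeline_endpoint_id='11111111-2222-3333-4444-555555555555',
    version='2',                                    # endpoint version, optional
    experiment_name='nightly-training',
    ml_pipeline_parameters={'learning_rate': '0.01'},
    data_path_assignments={'training_data': 'inputs/latest'},  # key/value pairs, shape depends on the workspace
    continue_on_step_failure=True,
)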
@@ -5055,13 +5493,13 @@ def __init__( self, **kwargs ): - super(AzureMySQLTableDataset, self).__init__(**kwargs) + super(AzureMySqlTableDataset, self).__init__(**kwargs) self.type = 'AzureMySqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) -class AzurePostgreSQLLinkedService(LinkedService): +class AzurePostgreSqlLinkedService(LinkedService): """Azure PostgreSQL linked service. All required parameters must be populated in order to send to Azure. @@ -5110,14 +5548,14 @@ def __init__( self, **kwargs ): - super(AzurePostgreSQLLinkedService, self).__init__(**kwargs) + super(AzurePostgreSqlLinkedService, self).__init__(**kwargs) self.type = 'AzurePostgreSql' # type: str self.connection_string = kwargs.get('connection_string', None) self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzurePostgreSQLSink(CopySink): +class AzurePostgreSqlSink(CopySink): """A copy activity Azure PostgreSQL sink. All required parameters must be populated in order to send to Azure. @@ -5166,12 +5604,12 @@ def __init__( self, **kwargs ): - super(AzurePostgreSQLSink, self).__init__(**kwargs) + super(AzurePostgreSqlSink, self).__init__(**kwargs) self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) -class AzurePostgreSQLSource(TabularSource): +class AzurePostgreSqlSource(TabularSource): """A copy activity Azure PostgreSQL source. All required parameters must be populated in order to send to Azure. @@ -5220,12 +5658,12 @@ def __init__( self, **kwargs ): - super(AzurePostgreSQLSource, self).__init__(**kwargs) + super(AzurePostgreSqlSource, self).__init__(**kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = kwargs.get('query', None) -class AzurePostgreSQLTableDataset(Dataset): +class AzurePostgreSqlTableDataset(Dataset): """Azure PostgreSQL dataset. All required parameters must be populated in order to send to Azure. @@ -5287,7 +5725,7 @@ def __init__( self, **kwargs ): - super(AzurePostgreSQLTableDataset, self).__init__(**kwargs) + super(AzurePostgreSqlTableDataset, self).__init__(**kwargs) self.type = 'AzurePostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) @@ -5515,7 +5953,7 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSQLDatabaseLinkedService(LinkedService): +class AzureSqlDatabaseLinkedService(LinkedService): """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. @@ -5582,7 +6020,7 @@ def __init__( self, **kwargs ): - super(AzureSQLDatabaseLinkedService, self).__init__(**kwargs) + super(AzureSqlDatabaseLinkedService, self).__init__(**kwargs) self.type = 'AzureSqlDatabase' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) @@ -5593,7 +6031,7 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSQLDWLinkedService(LinkedService): +class AzureSqlDwLinkedService(LinkedService): """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. 
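The AzureMySQL*/AzurePostgreSQL*/AzureSQL* classes above are only re-cased (Db, Sql, Dw, Mi); the 'type' discriminators they serialize are unchanged, so only Python imports need to move to the new names. A small sketch, assuming the vendored models module path:

from azext_datafactory.vendored_sdks.datafactory.models import (
    AzureMySqlLinkedService,      # was AzureMySQLLinkedService
    AzurePostgreSqlTableDataset,  # was AzurePostgreSQLTableDataset
)

mysql_ls = AzureMySqlLinkedService(connection_string='<connection string placeholder>')
assert mysql_ls.type == 'AzureMySql'  # discriminator value is not affected by the rename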
@@ -5660,7 +6098,7 @@ def __init__( self, **kwargs ): - super(AzureSQLDWLinkedService, self).__init__(**kwargs) + super(AzureSqlDwLinkedService, self).__init__(**kwargs) self.type = 'AzureSqlDW' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) @@ -5671,7 +6109,7 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSQLDWTableDataset(Dataset): +class AzureSqlDwTableDataset(Dataset): """The Azure SQL Data Warehouse dataset. All required parameters must be populated in order to send to Azure. @@ -5733,14 +6171,14 @@ def __init__( self, **kwargs ): - super(AzureSQLDWTableDataset, self).__init__(**kwargs) + super(AzureSqlDwTableDataset, self).__init__(**kwargs) self.type = 'AzureSqlDWTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) -class AzureSQLMiLinkedService(LinkedService): +class AzureSqlMiLinkedService(LinkedService): """Azure SQL Managed Instance linked service. All required parameters must be populated in order to send to Azure. @@ -5807,7 +6245,7 @@ def __init__( self, **kwargs ): - super(AzureSQLMiLinkedService, self).__init__(**kwargs) + super(AzureSqlMiLinkedService, self).__init__(**kwargs) self.type = 'AzureSqlMI' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) @@ -5818,7 +6256,7 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class AzureSQLMiTableDataset(Dataset): +class AzureSqlMiTableDataset(Dataset): """The Azure SQL Managed Instance dataset. All required parameters must be populated in order to send to Azure. @@ -5880,14 +6318,14 @@ def __init__( self, **kwargs ): - super(AzureSQLMiTableDataset, self).__init__(**kwargs) + super(AzureSqlMiTableDataset, self).__init__(**kwargs) self.type = 'AzureSqlMITable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) -class AzureSQLSink(CopySink): +class AzureSqlSink(CopySink): """A copy activity Azure SQL sink. All required parameters must be populated in order to send to Azure. @@ -5956,7 +6394,7 @@ def __init__( self, **kwargs ): - super(AzureSQLSink, self).__init__(**kwargs) + super(AzureSqlSink, self).__init__(**kwargs) self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) @@ -5966,7 +6404,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class AzureSQLSource(TabularSource): +class AzureSqlSource(TabularSource): """A copy activity Azure SQL source. All required parameters must be populated in order to send to Azure. @@ -6005,9 +6443,9 @@ class AzureSQLSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -6026,15 +6464,15 @@ class AzureSQLSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( self, **kwargs ): - super(AzureSQLSource, self).__init__(**kwargs) + super(AzureSqlSource, self).__init__(**kwargs) self.type = 'AzureSqlSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) @@ -6044,7 +6482,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class AzureSQLTableDataset(Dataset): +class AzureSqlTableDataset(Dataset): """The Azure SQL Server database dataset. All required parameters must be populated in order to send to Azure. @@ -6106,7 +6544,7 @@ def __init__( self, **kwargs ): - super(AzureSQLTableDataset, self).__init__(**kwargs) + super(AzureSqlTableDataset, self).__init__(**kwargs) self.type = 'AzureSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) @@ -6722,7 +7160,7 @@ class MultiplePipelineTrigger(Trigger): """Base class for all triggers that support one to many model for trigger to pipeline. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + sub-classes are: BlobEventsTrigger, BlobTrigger, CustomEventsTrigger, ScheduleTrigger. Variables are only populated by the server, and will be ignored when sending a request. @@ -6759,7 +7197,7 @@ class MultiplePipelineTrigger(Trigger): } _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'CustomEventsTrigger': 'CustomEventsTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( @@ -7365,6 +7803,26 @@ def __init__( self.password = kwargs['password'] +class CmkIdentityDefinition(msrest.serialization.Model): + """Managed Identity used for CMK. + + :param user_assigned_identity: The resource id of the user assigned identity to authenticate to + customer's key vault. + :type user_assigned_identity: str + """ + + _attribute_map = { + 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CmkIdentityDefinition, self).__init__(**kwargs) + self.user_assigned_identity = kwargs.get('user_assigned_identity', None) + + class CommonDataServiceForAppsEntityDataset(Dataset): """The Common Data Service for Apps entity dataset. @@ -7699,7 +8157,7 @@ class CompressionReadSettings(msrest.serialization.Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: ZipDeflateReadSettings. + sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. All required parameters must be populated in order to send to Azure. @@ -7720,7 +8178,7 @@ class CompressionReadSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__( @@ -7750,6 +8208,9 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. The user name that you use to access Concur Service. @@ -7786,6 +8247,7 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -7801,6 +8263,7 @@ def __init__( ): super(ConcurLinkedService, self).__init__(**kwargs) self.type = 'Concur' # type: str + self.connection_properties = kwargs.get('connection_properties', None) self.client_id = kwargs['client_id'] self.username = kwargs['username'] self.password = kwargs.get('password', None) @@ -8050,9 +8513,11 @@ class CopyActivity(ExecutionActivity): EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~data_factory_management_client.models.RedirectIncompatibleRowSettings - :param log_storage_settings: Log storage settings customer need to provide when enabling - session log. + :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + need to provide when enabling session log. :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling log. + :type log_settings: ~data_factory_management_client.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. 
@@ -8092,6 +8557,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, @@ -8116,12 +8582,38 @@ def __init__( self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) self.log_storage_settings = kwargs.get('log_storage_settings', None) + self.log_settings = kwargs.get('log_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) self.preserve = kwargs.get('preserve', None) self.validate_data_consistency = kwargs.get('validate_data_consistency', None) self.skip_error_file = kwargs.get('skip_error_file', None) +class CopyActivityLogSettings(msrest.serialization.Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = kwargs.get('log_level', None) + self.enable_reliable_logging = kwargs.get('enable_reliable_logging', None) + + class CopyTranslator(msrest.serialization.Model): """A copy activity translator. @@ -8159,7 +8651,7 @@ def __init__( self.type = 'CopyTranslator' # type: str -class CosmosDBLinkedService(LinkedService): +class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. All required parameters must be populated in order to send to Azure. @@ -8216,7 +8708,7 @@ def __init__( self, **kwargs ): - super(CosmosDBLinkedService, self).__init__(**kwargs) + super(CosmosDbLinkedService, self).__init__(**kwargs) self.type = 'CosmosDb' # type: str self.connection_string = kwargs.get('connection_string', None) self.account_endpoint = kwargs.get('account_endpoint', None) @@ -8225,7 +8717,7 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class CosmosDBMongoDBApiCollectionDataset(Dataset): +class CosmosDbMongoDbApiCollectionDataset(Dataset): """The CosmosDB (MongoDB API) database dataset. All required parameters must be populated in order to send to Azure. 
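The deprecated logStorageSettings path is replaced by the new log_settings property; a sketch of the replacement shape, assuming LogSettings, LogLocationSettings and LinkedServiceReference from the same models module (the linked service name and path are hypothetical):

from azext_datafactory.vendored_sdks.datafactory.models import (
    CopyActivityLogSettings,
    LinkedServiceReference,
    LogLocationSettings,
    LogSettings,
)

log_settings = LogSettings(
    enable_copy_activity_log=True,
    copy_activity_log_settings=CopyActivityLogSettings(
        log_level='Warning',           # supported: Info, Warning
        enable_reliable_logging=True,
    ),
    log_location_settings=LogLocationSettings(
        linked_service_name=LinkedServiceReference(reference_name='LogStorageLinkedService'),
        path='copyactivity-logs',
    ),
)
# Passed as CopyActivity(..., log_settings=log_settings) instead of log_storage_settings.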
@@ -8280,12 +8772,12 @@ def __init__( self, **kwargs ): - super(CosmosDBMongoDBApiCollectionDataset, self).__init__(**kwargs) + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApiCollection' # type: str self.collection = kwargs['collection'] -class CosmosDBMongoDBApiLinkedService(LinkedService): +class CosmosDbMongoDbApiLinkedService(LinkedService): """Linked service for CosmosDB (MongoDB API) data source. All required parameters must be populated in order to send to Azure. @@ -8333,13 +8825,13 @@ def __init__( self, **kwargs ): - super(CosmosDBMongoDBApiLinkedService, self).__init__(**kwargs) + super(CosmosDbMongoDbApiLinkedService, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApi' # type: str self.connection_string = kwargs['connection_string'] self.database = kwargs['database'] -class CosmosDBMongoDBApiSink(CopySink): +class CosmosDbMongoDbApiSink(CopySink): """A copy activity sink for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. @@ -8389,12 +8881,12 @@ def __init__( self, **kwargs ): - super(CosmosDBMongoDBApiSink, self).__init__(**kwargs) + super(CosmosDbMongoDbApiSink, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) -class CosmosDBMongoDBApiSource(CopySource): +class CosmosDbMongoDbApiSource(CopySource): """A copy activity source for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. @@ -8418,7 +8910,7 @@ class CosmosDBMongoDBApiSource(CopySource): with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. @@ -8443,7 +8935,7 @@ class CosmosDBMongoDBApiSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -8453,7 +8945,7 @@ def __init__( self, **kwargs ): - super(CosmosDBMongoDBApiSource, self).__init__(**kwargs) + super(CosmosDbMongoDbApiSource, self).__init__(**kwargs) self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = kwargs.get('filter', None) self.cursor_methods = kwargs.get('cursor_methods', None) @@ -8462,7 +8954,7 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) -class CosmosDBSQLApiCollectionDataset(Dataset): +class CosmosDbSqlApiCollectionDataset(Dataset): """Microsoft Azure CosmosDB (SQL API) Collection dataset. All required parameters must be populated in order to send to Azure. 
@@ -8517,12 +9009,12 @@ def __init__( self, **kwargs ): - super(CosmosDBSQLApiCollectionDataset, self).__init__(**kwargs) + super(CosmosDbSqlApiCollectionDataset, self).__init__(**kwargs) self.type = 'CosmosDbSqlApiCollection' # type: str self.collection_name = kwargs['collection_name'] -class CosmosDBSQLApiSink(CopySink): +class CosmosDbSqlApiSink(CopySink): """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. @@ -8571,12 +9063,12 @@ def __init__( self, **kwargs ): - super(CosmosDBSQLApiSink, self).__init__(**kwargs) + super(CosmosDbSqlApiSink, self).__init__(**kwargs) self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = kwargs.get('write_behavior', None) -class CosmosDBSQLApiSource(CopySource): +class CosmosDbSqlApiSource(CopySource): """A copy activity Azure CosmosDB (SQL API) Collection source. All required parameters must be populated in order to send to Azure. @@ -8632,7 +9124,7 @@ def __init__( self, **kwargs ): - super(CosmosDBSQLApiSource, self).__init__(**kwargs) + super(CosmosDbSqlApiSource, self).__init__(**kwargs) self.type = 'CosmosDbSqlApiSource' # type: str self.query = kwargs.get('query', None) self.page_size = kwargs.get('page_size', None) @@ -8820,18 +9312,16 @@ class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): :type core_count: int :param time_to_live: Time to live setting of the cluster in minutes. :type time_to_live: int - :param name: The resource name. - :type name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :param integration_runtime: Set to use integration runtime setting for data flow debug session. + :type integration_runtime: + ~data_factory_management_client.models.IntegrationRuntimeDebugResource """ _attribute_map = { 'compute_type': {'key': 'computeType', 'type': 'str'}, 'core_count': {'key': 'coreCount', 'type': 'int'}, 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, - 'name': {'key': 'integrationRuntime.name', 'type': 'str'}, - 'properties': {'key': 'integrationRuntime.properties', 'type': 'IntegrationRuntime'}, + 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeDebugResource'}, } def __init__( @@ -8842,8 +9332,7 @@ def __init__( self.compute_type = kwargs.get('compute_type', None) self.core_count = kwargs.get('core_count', None) self.time_to_live = kwargs.get('time_to_live', None) - self.name = kwargs.get('name', None) - self.properties = kwargs.get('properties', None) + self.integration_runtime = kwargs.get('integration_runtime', None) class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): @@ -8967,6 +9456,9 @@ class CustomActivity(ExecutionActivity): :param retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). :type retention_time_in_days: object + :param auto_user_specification: Elevation level and scope for the user, default is nonadmin + task. Type: string (or Expression with resultType double). 
+ :type auto_user_specification: object """ _validation = { @@ -8990,6 +9482,7 @@ class CustomActivity(ExecutionActivity): 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'auto_user_specification': {'key': 'typeProperties.autoUserSpecification', 'type': 'object'}, } def __init__( @@ -9004,6 +9497,7 @@ def __init__( self.reference_objects = kwargs.get('reference_objects', None) self.extended_properties = kwargs.get('extended_properties', None) self.retention_time_in_days = kwargs.get('retention_time_in_days', None) + self.auto_user_specification = kwargs.get('auto_user_specification', None) class CustomActivityReferenceObject(msrest.serialization.Model): @@ -9133,6 +9627,71 @@ def __init__( self.type_properties = kwargs['type_properties'] +class CustomEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a custom event is received. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :param subject_begins_with: The event subject must begin with the pattern provided for trigger + to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_begins_with: str + :param subject_ends_with: The event subject must end with the pattern provided for trigger to + fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_ends_with: str + :param events: Required. The list of event types that cause this trigger to fire. + :type events: list[object] + :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. 
+ :type scope: str + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'subject_begins_with': {'key': 'typeProperties.subjectBeginsWith', 'type': 'str'}, + 'subject_ends_with': {'key': 'typeProperties.subjectEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[object]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CustomEventsTrigger, self).__init__(**kwargs) + self.type = 'CustomEventsTrigger' # type: str + self.subject_begins_with = kwargs.get('subject_begins_with', None) + self.subject_ends_with = kwargs.get('subject_ends_with', None) + self.events = kwargs['events'] + self.scope = kwargs['scope'] + + class DatabricksNotebookActivity(ExecutionActivity): """DatabricksNotebook activity. @@ -9452,60 +10011,33 @@ def __init__( class DataFlowDebugPackage(msrest.serialization.Model): """Request body structure for starting data flow debug session. - Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] :param session_id: The ID of data flow debug session. :type session_id: str + :param data_flow: Data flow instance. + :type data_flow: ~data_factory_management_client.models.DataFlowDebugResource :param datasets: List of datasets. :type datasets: list[~data_factory_management_client.models.DatasetDebugResource] :param linked_services: List of linked services. :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] - :param parameters_debug_settings_parameters: Data flow parameters. - :type parameters_debug_settings_parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). - :type folder_path: object - :ivar type: Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param parameters_staging_linked_service_parameters: Arguments for LinkedService. - :type parameters_staging_linked_service_parameters: dict[str, object] - :param name: The resource name. - :type name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :param staging: Staging info for debug session. + :type staging: ~data_factory_management_client.models.DataFlowStagingInfo + :param debug_settings: Data flow debug settings. 
+ :type debug_settings: ~data_factory_management_client.models.DataFlowDebugPackageDebugSettings """ - _validation = { - 'type': {'constant': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, - 'source_settings': {'key': 'debugSettings.sourceSettings', 'type': '[DataFlowSourceSetting]'}, - 'parameters_debug_settings_parameters': {'key': 'debugSettings.parameters', 'type': '{object}'}, - 'dataset_parameters': {'key': 'debugSettings.datasetParameters', 'type': 'object'}, - 'folder_path': {'key': 'staging.folderPath', 'type': 'object'}, - 'type': {'key': 'staging.linkedService.type', 'type': 'str'}, - 'reference_name': {'key': 'staging.linkedService.referenceName', 'type': 'str'}, - 'parameters_staging_linked_service_parameters': {'key': 'staging.linkedService.parameters', 'type': '{object}'}, - 'name': {'key': 'dataFlow.name', 'type': 'str'}, - 'properties': {'key': 'dataFlow.properties', 'type': 'DataFlow'}, + 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, + 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, } - type = "LinkedServiceReference" - def __init__( self, **kwargs @@ -9513,16 +10045,38 @@ def __init__( super(DataFlowDebugPackage, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.session_id = kwargs.get('session_id', None) + self.data_flow = kwargs.get('data_flow', None) self.datasets = kwargs.get('datasets', None) self.linked_services = kwargs.get('linked_services', None) + self.staging = kwargs.get('staging', None) + self.debug_settings = kwargs.get('debug_settings', None) + + +class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): + """Data flow debug settings. + + :param source_settings: Source setting for data flow debug. + :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] + :param parameters: Data flow parameters. + :type parameters: dict[str, object] + :param dataset_parameters: Parameters for dataset. + :type dataset_parameters: object + """ + + _attribute_map = { + 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) self.source_settings = kwargs.get('source_settings', None) - self.parameters_debug_settings_parameters = kwargs.get('parameters_debug_settings_parameters', None) + self.parameters = kwargs.get('parameters', None) self.dataset_parameters = kwargs.get('dataset_parameters', None) - self.folder_path = kwargs.get('folder_path', None) - self.reference_name = kwargs.get('reference_name', None) - self.parameters_staging_linked_service_parameters = kwargs.get('parameters_staging_linked_service_parameters', None) - self.name = kwargs.get('name', None) - self.properties = kwargs.get('properties', None) class SubResourceDebugResource(msrest.serialization.Model): @@ -9939,40 +10493,25 @@ def __init__( class DataFlowStagingInfo(msrest.serialization.Model): """Staging info for execute data flow activity. 
- Variables are only populated by the server, and will be ignored when sending a request. - + :param linked_service: Staging linked service reference. + :type linked_service: ~data_factory_management_client.models.LinkedServiceReference :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). :type folder_path: object - :ivar type: Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. - :type parameters: dict[str, object] """ - _validation = { - 'type': {'constant': True}, - } - _attribute_map = { + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'type': {'key': 'linkedService.type', 'type': 'str'}, - 'reference_name': {'key': 'linkedService.referenceName', 'type': 'str'}, - 'parameters': {'key': 'linkedService.parameters', 'type': '{object}'}, } - type = "LinkedServiceReference" - def __init__( self, **kwargs ): super(DataFlowStagingInfo, self).__init__(**kwargs) + self.linked_service = kwargs.get('linked_service', None) self.folder_path = kwargs.get('folder_path', None) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) class DataLakeAnalyticsUsqlActivity(ExecutionActivity): @@ -10063,7 +10602,7 @@ class DatasetCompression(msrest.serialization.Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. All required parameters must be populated in order to send to Azure. @@ -10084,7 +10623,7 @@ class DatasetCompression(msrest.serialization.Model): } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} } def __init__( @@ -10398,6 +10937,68 @@ def __init__( self.type = kwargs.get('type', None) +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetTarCompression, self).__init__(**kwargs) + self.type = 'Tar' # type: str + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". + :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DatasetTarGZipCompression, self).__init__(**kwargs) + self.type = 'TarGZip' # type: str + self.level = kwargs.get('level', None) + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -10778,7 +11379,7 @@ class DelimitedTextDataset(Dataset): resultType string). :type encoding_name: object :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec :param compression_level: The data compression method used for DelimitedText. Possible values include: "Optimal", "Fastest". @@ -11005,6 +11606,13 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -11017,6 +11625,8 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -11027,6 +11637,8 @@ def __init__( self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = kwargs.get('quote_all_text', None) self.file_extension = kwargs['file_extension'] + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) class DependencyReference(msrest.serialization.Model): @@ -11099,7 +11711,7 @@ def __init__( self.distcp_options = kwargs.get('distcp_options', None) -class DocumentDBCollectionDataset(Dataset): +class DocumentDbCollectionDataset(Dataset): """Microsoft Azure Document Database Collection dataset. All required parameters must be populated in order to send to Azure. 
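The new Tar and TarGZip compression models above follow the same pattern as the existing BZip2/GZip/ZipDeflate types and can be assigned to a dataset's compression property; a minimal sketch:

from azext_datafactory.vendored_sdks.datafactory.models import (
    DatasetTarCompression,
    DatasetTarGZipCompression,
)

plain_tar = DatasetTarCompression()                    # serializes as {"type": "Tar"}
tar_gzip = DatasetTarGZipCompression(level='Optimal')  # level: "Optimal" or "Fastest"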
@@ -11154,12 +11766,12 @@ def __init__( self, **kwargs ): - super(DocumentDBCollectionDataset, self).__init__(**kwargs) + super(DocumentDbCollectionDataset, self).__init__(**kwargs) self.type = 'DocumentDbCollection' # type: str self.collection_name = kwargs['collection_name'] -class DocumentDBCollectionSink(CopySink): +class DocumentDbCollectionSink(CopySink): """A copy activity Document Database Collection sink. All required parameters must be populated in order to send to Azure. @@ -11212,13 +11824,13 @@ def __init__( self, **kwargs ): - super(DocumentDBCollectionSink, self).__init__(**kwargs) + super(DocumentDbCollectionSink, self).__init__(**kwargs) self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = kwargs.get('nesting_separator', None) self.write_behavior = kwargs.get('write_behavior', None) -class DocumentDBCollectionSource(CopySource): +class DocumentDbCollectionSource(CopySource): """A copy activity Document Database Collection source. All required parameters must be populated in order to send to Azure. @@ -11270,7 +11882,7 @@ def __init__( self, **kwargs ): - super(DocumentDBCollectionSource, self).__init__(**kwargs) + super(DocumentDbCollectionSource, self).__init__(**kwargs) self.type = 'DocumentDbCollectionSource' # type: str self.query = kwargs.get('query', None) self.nesting_separator = kwargs.get('nesting_separator', None) @@ -11456,7 +12068,7 @@ def __init__( self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) -class DWCopyCommandDefaultValue(msrest.serialization.Model): +class DwCopyCommandDefaultValue(msrest.serialization.Model): """Default value. :param column_name: Column name. Type: object (or Expression with resultType string). @@ -11475,19 +12087,19 @@ def __init__( self, **kwargs ): - super(DWCopyCommandDefaultValue, self).__init__(**kwargs) + super(DwCopyCommandDefaultValue, self).__init__(**kwargs) self.column_name = kwargs.get('column_name', None) self.default_value = kwargs.get('default_value', None) -class DWCopyCommandSettings(msrest.serialization.Model): +class DwCopyCommandSettings(msrest.serialization.Model): """DW Copy Command settings. :param default_values: Specifies the default values for each target column in SQL DW. The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). - :type default_values: list[~data_factory_management_client.models.DWCopyCommandDefaultValue] + :type default_values: list[~data_factory_management_client.models.DwCopyCommandDefaultValue] :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. 
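A short sketch of the re-cased DwCopyCommand* models above, reusing the additionalOptions example from the docstring (the default-value column is illustrative); the resulting settings object is typically attached to a SQL DW copy sink:

from azext_datafactory.vendored_sdks.datafactory.models import (
    DwCopyCommandDefaultValue,
    DwCopyCommandSettings,
)

copy_command_settings = DwCopyCommandSettings(
    default_values=[
        DwCopyCommandDefaultValue(column_name='LoadDate', default_value='GETDATE()'),
    ],
    additional_options={'MAXERRORS': '1000', 'DATEFORMAT': "'ymd'"},
)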
@@ -11495,7 +12107,7 @@ class DWCopyCommandSettings(msrest.serialization.Model): """ _attribute_map = { - 'default_values': {'key': 'defaultValues', 'type': '[DWCopyCommandDefaultValue]'}, + 'default_values': {'key': 'defaultValues', 'type': '[DwCopyCommandDefaultValue]'}, 'additional_options': {'key': 'additionalOptions', 'type': '{str}'}, } @@ -11503,7 +12115,7 @@ def __init__( self, **kwargs ): - super(DWCopyCommandSettings, self).__init__(**kwargs) + super(DwCopyCommandSettings, self).__init__(**kwargs) self.default_values = kwargs.get('default_values', None) self.additional_options = kwargs.get('additional_options', None) @@ -12483,6 +13095,47 @@ def __init__( self.query = kwargs.get('query', None) +class EncryptionConfiguration(msrest.serialization.Model): + """Definition of CMK for the factory. + + All required parameters must be populated in order to send to Azure. + + :param key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed + Key. + :type key_name: str + :param vault_base_url: Required. The url of the Azure Key Vault used for CMK. + :type vault_base_url: str + :param key_version: The version of the key used for CMK. If not provided, latest version will + be used. + :type key_version: str + :param identity: User assigned identity to use to authenticate to customer's key vault. If not + provided Managed Service Identity will be used. + :type identity: ~data_factory_management_client.models.CmkIdentityDefinition + """ + + _validation = { + 'key_name': {'required': True}, + 'vault_base_url': {'required': True}, + } + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'}, + 'key_version': {'key': 'keyVersion', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'CmkIdentityDefinition'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionConfiguration, self).__init__(**kwargs) + self.key_name = kwargs['key_name'] + self.vault_base_url = kwargs['vault_base_url'] + self.key_version = kwargs.get('key_version', None) + self.identity = kwargs.get('identity', None) + + class EntityReference(msrest.serialization.Model): """The entity reference. @@ -12705,6 +13358,16 @@ class ExecuteDataFlowActivity(ExecutionActivity): :param compute: Compute properties for data flow activity. :type compute: ~data_factory_management_client.models.ExecuteDataFlowActivityTypePropertiesCompute + :param trace_level: Trace level setting used for data flow monitoring output. Supported values + are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). + :type trace_level: object + :param continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :type continue_on_error: object + :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + the same save order to be processed concurrently. Type: boolean (or Expression with resultType + boolean). 
+ :type run_concurrently: object """ _validation = { @@ -12726,6 +13389,9 @@ class ExecuteDataFlowActivity(ExecutionActivity): 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, + 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, + 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, } def __init__( @@ -12738,22 +13404,26 @@ def __init__( self.staging = kwargs.get('staging', None) self.integration_runtime = kwargs.get('integration_runtime', None) self.compute = kwargs.get('compute', None) + self.trace_level = kwargs.get('trace_level', None) + self.continue_on_error = kwargs.get('continue_on_error', None) + self.run_concurrently = kwargs.get('run_concurrently', None) class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): """Compute properties for data flow activity. :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~data_factory_management_client.models.DataFlowComputeType + values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression + with resultType string). + :type compute_type: object :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int + are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). + :type core_count: object """ _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'compute_type': {'key': 'computeType', 'type': 'object'}, + 'core_count': {'key': 'coreCount', 'type': 'object'}, } def __init__( @@ -12927,43 +13597,6 @@ def __init__( self.log_location = kwargs.get('log_location', None) -class ExportSettings(msrest.serialization.Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__( - self, - **kwargs - ): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'ExportSettings' # type: str - - class ExposureControlBatchRequest(msrest.serialization.Model): """A list of exposure control features. @@ -13183,6 +13816,11 @@ class Factory(Resource): :param global_parameters: List of parameters for factory. 
:type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param encryption: Properties to enable Customer Managed Key for the factory. + :type encryption: ~data_factory_management_client.models.EncryptionConfiguration + :param public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". + :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess """ _validation = { @@ -13209,6 +13847,8 @@ class Factory(Resource): 'version': {'key': 'properties.version', 'type': 'str'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, + 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionConfiguration'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, } def __init__( @@ -13223,6 +13863,8 @@ def __init__( self.version = None self.repo_configuration = kwargs.get('repo_configuration', None) self.global_parameters = kwargs.get('global_parameters', None) + self.encryption = kwargs.get('encryption', None) + self.public_network_access = kwargs.get('public_network_access', None) class FactoryRepoConfiguration(msrest.serialization.Model): @@ -13336,13 +13978,14 @@ class FactoryIdentity(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. The identity type. Currently the only supported type is 'SystemAssigned'. - Default value: "SystemAssigned". + :ivar type: Required. The identity type. Default value: "SystemAssigned". :vartype type: str :ivar principal_id: The principal id of the identity. :vartype principal_id: str :ivar tenant_id: The client tenant id of the identity. :vartype tenant_id: str + :param user_assigned_identities: List of user assigned identities for the factory. + :type user_assigned_identities: dict[str, object] """ _validation = { @@ -13355,6 +13998,7 @@ class FactoryIdentity(msrest.serialization.Model): 'type': {'key': 'type', 'type': 'str'}, 'principal_id': {'key': 'principalId', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{object}'}, } type = "SystemAssigned" @@ -13366,6 +14010,7 @@ def __init__( super(FactoryIdentity, self).__init__(**kwargs) self.principal_id = None self.tenant_id = None + self.user_assigned_identities = kwargs.get('user_assigned_identities', None) class FactoryListResponse(msrest.serialization.Model): @@ -17446,43 +18091,6 @@ def __init__( self.query = kwargs.get('query', None) -class ImportSettings(msrest.serialization.Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__( - self, - **kwargs - ): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = 'ImportSettings' # type: str - - class InformixLinkedService(LinkedService): """Informix linked service. @@ -18279,6 +18887,9 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): include: "Basic", "Standard", "Premium", "PremiumRS". :type catalog_pricing_tier: str or ~data_factory_management_client.models.IntegrationRuntimeSsisCatalogPricingTier + :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to + support SSISDB failover. + :type dual_standby_pair_name: str """ _validation = { @@ -18291,6 +18902,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + 'dual_standby_pair_name': {'key': 'dualStandbyPairName', 'type': 'str'}, } def __init__( @@ -18303,6 +18915,7 @@ def __init__( self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) self.catalog_admin_password = kwargs.get('catalog_admin_password', None) self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) + self.dual_standby_pair_name = kwargs.get('dual_standby_pair_name', None) class IntegrationRuntimeSsisProperties(msrest.serialization.Model): @@ -19323,8 +19936,74 @@ def __init__( self.properties = kwargs['properties'] +class LogLocationSettings(msrest.serialization.Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = kwargs['linked_service_name'] + self.path = kwargs.get('path', None) + + +class LogSettings(msrest.serialization.Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. + + :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity log. + :type copy_activity_log_settings: + ~data_factory_management_client.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer needs to provide when + enabling log. 
+ :type log_location_settings: ~data_factory_management_client.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__( + self, + **kwargs + ): + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = kwargs.get('enable_copy_activity_log', None) + self.copy_activity_log_settings = kwargs.get('copy_activity_log_settings', None) + self.log_location_settings = kwargs['log_location_settings'] + + class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. @@ -19634,6 +20313,9 @@ class ManagedIntegrationRuntime(IntegrationRuntime): Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :param managed_virtual_network: Managed Virtual Network reference. + :type managed_virtual_network: + ~data_factory_management_client.models.ManagedVirtualNetworkReference :param compute_properties: The compute resource for managed integration runtime. :type compute_properties: ~data_factory_management_client.models.IntegrationRuntimeComputeProperties @@ -19651,6 +20333,7 @@ class ManagedIntegrationRuntime(IntegrationRuntime): 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, + 'managed_virtual_network': {'key': 'managedVirtualNetwork', 'type': 'ManagedVirtualNetworkReference'}, 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, } @@ -19662,6 +20345,7 @@ def __init__( super(ManagedIntegrationRuntime, self).__init__(**kwargs) self.type = 'Managed' # type: str self.state = None + self.managed_virtual_network = kwargs.get('managed_virtual_network', None) self.compute_properties = kwargs.get('compute_properties', None) self.ssis_properties = kwargs.get('ssis_properties', None) @@ -19957,6 +20641,8 @@ class ManagedPrivateEndpointResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -19965,19 +20651,8 @@ class ManagedPrivateEndpointResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. :vartype etag: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :ivar is_reserved: Denotes whether the managed private endpoint is reserved. 
- :vartype is_reserved: bool - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. - :type private_link_resource_id: str - :ivar provisioning_state: The managed private endpoint provisioning state. - :vartype provisioning_state: str + :param properties: Required. Managed private endpoint properties. + :type properties: ~data_factory_management_client.models.ManagedPrivateEndpoint """ _validation = { @@ -19985,8 +20660,7 @@ class ManagedPrivateEndpointResource(SubResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, - 'is_reserved': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { @@ -19994,12 +20668,7 @@ class ManagedPrivateEndpointResource(SubResource): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, - 'connection_state': {'key': 'properties.connectionState', 'type': 'ConnectionStateProperties'}, - 'fqdns': {'key': 'properties.fqdns', 'type': '[str]'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'is_reserved': {'key': 'properties.isReserved', 'type': 'bool'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ManagedPrivateEndpoint'}, } def __init__( @@ -20007,12 +20676,7 @@ def __init__( **kwargs ): super(ManagedPrivateEndpointResource, self).__init__(**kwargs) - self.connection_state = kwargs.get('connection_state', None) - self.fqdns = kwargs.get('fqdns', None) - self.group_id = kwargs.get('group_id', None) - self.is_reserved = None - self.private_link_resource_id = kwargs.get('private_link_resource_id', None) - self.provisioning_state = None + self.properties = kwargs['properties'] class ManagedVirtualNetwork(msrest.serialization.Model): @@ -20079,6 +20743,40 @@ def __init__( self.next_link = kwargs.get('next_link', None) +class ManagedVirtualNetworkReference(msrest.serialization.Model): + """Managed Virtual Network reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Managed Virtual Network reference type. Default value: + "ManagedVirtualNetworkReference". + :vartype type: str + :param reference_name: Required. Reference ManagedVirtualNetwork name. + :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "ManagedVirtualNetworkReference" + + def __init__( + self, + **kwargs + ): + super(ManagedVirtualNetworkReference, self).__init__(**kwargs) + self.reference_name = kwargs['reference_name'] + + class ManagedVirtualNetworkResource(SubResource): """Managed Virtual Network resource type. @@ -20167,7 +20865,7 @@ def __init__( self.script = kwargs.get('script', None) -class MariaDBLinkedService(LinkedService): +class MariaDbLinkedService(LinkedService): """MariaDB server linked service. All required parameters must be populated in order to send to Azure. 
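For orientation, a minimal sketch of how the new ManagedVirtualNetworkReference could be wired into a ManagedIntegrationRuntime via the added managed_virtual_network keyword, assuming the models are importable from the extension's vendored SDK (the import path and the network name are illustrative assumptions, not part of this change):

# Assumed import path for the vendored SDK models.
from azext_datafactory.vendored_sdks.datafactory import models

# Reference a managed virtual network by name (hypothetical name) and attach it to a
# managed integration runtime through the newly added managed_virtual_network kwarg.
vnet_ref = models.ManagedVirtualNetworkReference(reference_name="default")
managed_ir = models.ManagedIntegrationRuntime(
    description="Managed IR provisioned inside a managed virtual network",
    managed_virtual_network=vnet_ref,
)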
@@ -20216,14 +20914,14 @@ def __init__( self, **kwargs ): - super(MariaDBLinkedService, self).__init__(**kwargs) + super(MariaDbLinkedService, self).__init__(**kwargs) self.type = 'MariaDB' # type: str self.connection_string = kwargs.get('connection_string', None) self.pwd = kwargs.get('pwd', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MariaDBSource(TabularSource): +class MariaDbSource(TabularSource): """A copy activity MariaDB server source. All required parameters must be populated in order to send to Azure. @@ -20272,12 +20970,12 @@ def __init__( self, **kwargs ): - super(MariaDBSource, self).__init__(**kwargs) + super(MariaDbSource, self).__init__(**kwargs) self.type = 'MariaDBSource' # type: str self.query = kwargs.get('query', None) -class MariaDBTableDataset(Dataset): +class MariaDbTableDataset(Dataset): """MariaDB server dataset. All required parameters must be populated in order to send to Azure. @@ -20330,7 +21028,7 @@ def __init__( self, **kwargs ): - super(MariaDBTableDataset, self).__init__(**kwargs) + super(MariaDbTableDataset, self).__init__(**kwargs) self.type = 'MariaDBTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -20729,14 +21427,74 @@ class MicrosoftAccessTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~data_factory_management_client.models.DatasetFolder - :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType - string). - :type table_name: object + :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MicrosoftAccessTableDataset, self).__init__(**kwargs) + self.type = 'MicrosoftAccessTable' # type: str + self.table_name = kwargs.get('table_name', None) + + +class MongoDbAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. 
+ :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). + :type collection: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -20749,19 +21507,141 @@ class MicrosoftAccessTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__( self, **kwargs ): - super(MicrosoftAccessTableDataset, self).__init__(**kwargs) - self.type = 'MicrosoftAccessTable' # type: str - self.table_name = kwargs.get('table_name', None) + super(MongoDbAtlasCollectionDataset, self).__init__(**kwargs) + self.type = 'MongoDbAtlasCollection' # type: str + self.collection = kwargs['collection'] + + +class MongoDbAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). 
+ :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasLinkedService, self).__init__(**kwargs) + self.type = 'MongoDbAtlas' # type: str + self.connection_string = kwargs['connection_string'] + self.database = kwargs['database'] + + +class MongoDbAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + def __init__( + self, + **kwargs + ): + super(MongoDbAtlasSource, self).__init__(**kwargs) + self.type = 'MongoDbAtlasSource' # type: str + self.filter = kwargs.get('filter', None) + self.cursor_methods = kwargs.get('cursor_methods', None) + self.batch_size = kwargs.get('batch_size', None) + self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) -class MongoDBCollectionDataset(Dataset): + +class MongoDbCollectionDataset(Dataset): """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. @@ -20816,12 +21696,12 @@ def __init__( self, **kwargs ): - super(MongoDBCollectionDataset, self).__init__(**kwargs) + super(MongoDbCollectionDataset, self).__init__(**kwargs) self.type = 'MongoDbCollection' # type: str self.collection_name = kwargs['collection_name'] -class MongoDBCursorMethodsProperties(msrest.serialization.Model): +class MongoDbCursorMethodsProperties(msrest.serialization.Model): """Cursor methods for Mongodb query. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -20856,7 +21736,7 @@ def __init__( self, **kwargs ): - super(MongoDBCursorMethodsProperties, self).__init__(**kwargs) + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) self.additional_properties = kwargs.get('additional_properties', None) self.project = kwargs.get('project', None) self.sort = kwargs.get('sort', None) @@ -20864,7 +21744,7 @@ def __init__( self.limit = kwargs.get('limit', None) -class MongoDBLinkedService(LinkedService): +class MongoDbLinkedService(LinkedService): """Linked service for MongoDb data source. All required parameters must be populated in order to send to Azure. @@ -20888,7 +21768,7 @@ class MongoDBLinkedService(LinkedService): :param authentication_type: The authentication type to be used to connect to the MongoDB database. Possible values include: "Basic", "Anonymous". :type authentication_type: str or - ~data_factory_management_client.models.MongoDBAuthenticationType + ~data_factory_management_client.models.MongoDbAuthenticationType :param database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). 
:type database_name: object @@ -20944,7 +21824,7 @@ def __init__( self, **kwargs ): - super(MongoDBLinkedService, self).__init__(**kwargs) + super(MongoDbLinkedService, self).__init__(**kwargs) self.type = 'MongoDb' # type: str self.server = kwargs['server'] self.authentication_type = kwargs.get('authentication_type', None) @@ -20958,7 +21838,7 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MongoDBSource(CopySource): +class MongoDbSource(CopySource): """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. @@ -21003,13 +21883,13 @@ def __init__( self, **kwargs ): - super(MongoDBSource, self).__init__(**kwargs) + super(MongoDbSource, self).__init__(**kwargs) self.type = 'MongoDbSource' # type: str self.query = kwargs.get('query', None) self.additional_columns = kwargs.get('additional_columns', None) -class MongoDBV2CollectionDataset(Dataset): +class MongoDbV2CollectionDataset(Dataset): """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. @@ -21064,12 +21944,12 @@ def __init__( self, **kwargs ): - super(MongoDBV2CollectionDataset, self).__init__(**kwargs) + super(MongoDbV2CollectionDataset, self).__init__(**kwargs) self.type = 'MongoDbV2Collection' # type: str self.collection = kwargs['collection'] -class MongoDBV2LinkedService(LinkedService): +class MongoDbV2LinkedService(LinkedService): """Linked service for MongoDB data source. All required parameters must be populated in order to send to Azure. @@ -21116,13 +21996,13 @@ def __init__( self, **kwargs ): - super(MongoDBV2LinkedService, self).__init__(**kwargs) + super(MongoDbV2LinkedService, self).__init__(**kwargs) self.type = 'MongoDbV2' # type: str self.connection_string = kwargs['connection_string'] self.database = kwargs['database'] -class MongoDBV2Source(CopySource): +class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. @@ -21146,7 +22026,7 @@ class MongoDBV2Source(CopySource): with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. 
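A small sketch of the new MongoDB Atlas connector models defined above; the connection string, database, filter, and batch values are placeholders, and the import path is assumed:

from azext_datafactory.vendored_sdks.datafactory import models  # assumed path

# Linked service: connection_string and database are both required kwargs.
atlas_ls = models.MongoDbAtlasLinkedService(
    connection_string="mongodb+srv://user:password@cluster.example.mongodb.net",  # placeholder
    database="exampleDatabase",
)

# Copy source: optional filter, cursor methods, and batch size as declared above.
atlas_source = models.MongoDbAtlasSource(
    filter='{"status": "active"}',
    cursor_methods=models.MongoDbCursorMethodsProperties(limit=1000),
    batch_size=100,
)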
@@ -21171,7 +22051,7 @@ class MongoDBV2Source(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -21181,7 +22061,7 @@ def __init__( self, **kwargs ): - super(MongoDBV2Source, self).__init__(**kwargs) + super(MongoDbV2Source, self).__init__(**kwargs) self.type = 'MongoDbV2Source' # type: str self.filter = kwargs.get('filter', None) self.cursor_methods = kwargs.get('cursor_methods', None) @@ -21190,7 +22070,7 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) -class MySQLLinkedService(LinkedService): +class MySqlLinkedService(LinkedService): """Linked service for MySQL data source. All required parameters must be populated in order to send to Azure. @@ -21239,14 +22119,14 @@ def __init__( self, **kwargs ): - super(MySQLLinkedService, self).__init__(**kwargs) + super(MySqlLinkedService, self).__init__(**kwargs) self.type = 'MySql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class MySQLSource(TabularSource): +class MySqlSource(TabularSource): """A copy activity source for MySQL databases. All required parameters must be populated in order to send to Azure. @@ -21294,12 +22174,12 @@ def __init__( self, **kwargs ): - super(MySQLSource, self).__init__(**kwargs) + super(MySqlSource, self).__init__(**kwargs) self.type = 'MySqlSource' # type: str self.query = kwargs.get('query', None) -class MySQLTableDataset(Dataset): +class MySqlTableDataset(Dataset): """The MySQL table dataset. All required parameters must be populated in order to send to Azure. @@ -21352,7 +22232,7 @@ def __init__( self, **kwargs ): - super(MySQLTableDataset, self).__init__(**kwargs) + super(MySqlTableDataset, self).__init__(**kwargs) self.type = 'MySqlTable' # type: str self.table_name = kwargs.get('table_name', None) @@ -21475,7 +22355,7 @@ class NetezzaSource(TabularSource): :type query: object :param partition_option: The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.NetezzaPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Netezza source partitioning. 
:type partition_settings: ~data_factory_management_client.models.NetezzaPartitionSettings """ @@ -21493,7 +22373,7 @@ class NetezzaSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } @@ -21624,7 +22504,7 @@ class ODataLinkedService(LinkedService): :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type aad_service_principal_credential_type: str or - ~data_factory_management_client.models.ODataAADServicePrincipalCredentialType + ~data_factory_management_client.models.ODataAadServicePrincipalCredentialType :param service_principal_key: Specify the secret of your application registered in Azure Active Directory. Type: string (or Expression with resultType string). :type service_principal_key: ~data_factory_management_client.models.SecretBase @@ -22861,7 +23741,7 @@ class OracleSource(CopySource): :type query_timeout: object :param partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.OraclePartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~data_factory_management_client.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -22881,7 +23761,7 @@ class OracleSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -22997,7 +23877,7 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec """ @@ -23092,6 +23972,8 @@ class OrcSink(CopySink): :type max_concurrent_connections: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: ORC format settings. 
+ :type format_settings: ~data_factory_management_client.models.OrcWriteSettings """ _validation = { @@ -23107,6 +23989,7 @@ class OrcSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__( @@ -23116,6 +23999,7 @@ def __init__( super(OrcSink, self).__init__(**kwargs) self.type = 'OrcSink' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class OrcSource(CopySource): @@ -23168,6 +24052,46 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(OrcWriteSettings, self).__init__(**kwargs) + self.type = 'OrcWriteSettings' # type: str + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + + class PackageStore(msrest.serialization.Model): """Package store for the SSIS integration runtime. @@ -23258,7 +24182,7 @@ class ParquetDataset(Dataset): :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec """ @@ -23353,6 +24277,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param store_settings: Parquet store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: Parquet format settings. 
+ :type format_settings: ~data_factory_management_client.models.ParquetWriteSettings """ _validation = { @@ -23368,6 +24294,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__( @@ -23377,6 +24304,7 @@ def __init__( super(ParquetSink, self).__init__(**kwargs) self.type = 'ParquetSink' # type: str self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) class ParquetSource(CopySource): @@ -23429,6 +24357,46 @@ def __init__( self.additional_columns = kwargs.get('additional_columns', None) +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ParquetWriteSettings, self).__init__(**kwargs) + self.type = 'ParquetWriteSettings' # type: str + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -23847,6 +24815,44 @@ def __init__( self.query = kwargs.get('query', None) +class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): + """Pipeline ElapsedTime Metric Policy. + + :param duration: TimeSpan value, after which an Azure Monitoring Metric is fired. + :type duration: object + """ + + _attribute_map = { + 'duration': {'key': 'duration', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) + self.duration = kwargs.get('duration', None) + + +class PipelineFolder(msrest.serialization.Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelineFolder, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + + class PipelineListResponse(msrest.serialization.Model): """A list of pipeline resources. 
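To illustrate the new format_settings hook on the Parquet sink, a hedged sketch using ParquetWriteSettings (values and the import path are assumptions; OrcSink with OrcWriteSettings follows the same shape):

from azext_datafactory.vendored_sdks.datafactory import models  # assumed path

parquet_sink = models.ParquetSink(
    format_settings=models.ParquetWriteSettings(
        max_rows_per_file=1000000,  # cap each written file at roughly one million rows
        file_name_prefix="part",    # prefix applied when copying from a non-file-based store
    ),
)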
@@ -23876,6 +24882,26 @@ def __init__( self.next_link = kwargs.get('next_link', None) +class PipelinePolicy(msrest.serialization.Model): + """Pipeline Policy. + + :param elapsed_time_metric: Pipeline ElapsedTime Metric Policy. + :type elapsed_time_metric: + ~data_factory_management_client.models.PipelineElapsedTimeMetricPolicy + """ + + _attribute_map = { + 'elapsed_time_metric': {'key': 'elapsedTimeMetric', 'type': 'PipelineElapsedTimeMetricPolicy'}, + } + + def __init__( + self, + **kwargs + ): + super(PipelinePolicy, self).__init__(**kwargs) + self.elapsed_time_metric = kwargs.get('elapsed_time_metric', None) + + class PipelineReference(msrest.serialization.Model): """Pipeline reference type. @@ -23943,8 +24969,11 @@ class PipelineResource(SubResource): :type annotations: list[object] :param run_dimensions: Dimensions emitted by Pipeline. :type run_dimensions: dict[str, object] - :param name_folder_name: The name of the folder that this Pipeline is in. - :type name_folder_name: str + :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + the root level. + :type folder: ~data_factory_management_client.models.PipelineFolder + :param policy: Pipeline Policy. + :type policy: ~data_factory_management_client.models.PipelinePolicy """ _validation = { @@ -23968,7 +24997,8 @@ class PipelineResource(SubResource): 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'name_folder_name': {'key': 'folder.name', 'type': 'str'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + 'policy': {'key': 'properties.policy', 'type': 'PipelinePolicy'}, } def __init__( @@ -23984,7 +25014,8 @@ def __init__( self.concurrency = kwargs.get('concurrency', None) self.annotations = kwargs.get('annotations', None) self.run_dimensions = kwargs.get('run_dimensions', None) - self.name_folder_name = kwargs.get('name_folder_name', None) + self.folder = kwargs.get('folder', None) + self.policy = kwargs.get('policy', None) class PipelineRun(msrest.serialization.Model): @@ -24184,7 +25215,7 @@ def __init__( self.use_type_default = kwargs.get('use_type_default', None) -class PostgreSQLLinkedService(LinkedService): +class PostgreSqlLinkedService(LinkedService): """Linked service for PostgreSQL data source. All required parameters must be populated in order to send to Azure. @@ -24233,14 +25264,14 @@ def __init__( self, **kwargs ): - super(PostgreSQLLinkedService, self).__init__(**kwargs) + super(PostgreSqlLinkedService, self).__init__(**kwargs) self.type = 'PostgreSql' # type: str self.connection_string = kwargs['connection_string'] self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) -class PostgreSQLSource(TabularSource): +class PostgreSqlSource(TabularSource): """A copy activity source for PostgreSQL databases. All required parameters must be populated in order to send to Azure. @@ -24288,12 +25319,12 @@ def __init__( self, **kwargs ): - super(PostgreSQLSource, self).__init__(**kwargs) + super(PostgreSqlSource, self).__init__(**kwargs) self.type = 'PostgreSqlSource' # type: str self.query = kwargs.get('query', None) -class PostgreSQLTableDataset(Dataset): +class PostgreSqlTableDataset(Dataset): """The PostgreSQL table dataset. All required parameters must be populated in order to send to Azure. 
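Since PipelineResource now exposes folder and policy sub-models instead of the flattened name_folder_name property, a brief sketch of how they compose (folder name and duration are illustrative; the import path is assumed):

from azext_datafactory.vendored_sdks.datafactory import models  # assumed path

pipeline = models.PipelineResource(
    activities=[],
    folder=models.PipelineFolder(name="examples"),  # hypothetical folder name
    policy=models.PipelinePolicy(
        elapsed_time_metric=models.PipelineElapsedTimeMetricPolicy(
            duration="00:10:00",  # fire the elapsed-time metric after 10 minutes (TimeSpan-style value)
        ),
    ),
)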
@@ -24354,7 +25385,7 @@ def __init__( self, **kwargs ): - super(PostgreSQLTableDataset, self).__init__(**kwargs) + super(PostgreSqlTableDataset, self).__init__(**kwargs) self.type = 'PostgreSqlTable' # type: str self.table_name = kwargs.get('table_name', None) self.table = kwargs.get('table', None) @@ -25537,13 +26568,9 @@ class RestSink(CopySink): :type http_request_timeout: object :param request_interval: The time to await before sending next request, in milliseconds. :type request_interval: object - :param compression_type: Compression Type to Send data in compressed format with Optimal - Compression Level, Default is None. And The Only Supported option is Gzip. - :type compression_type: object - :param wrap_request_json_in_an_object: Wraps Request Array Json into an Object before calling - the rest endpoint , Default is false. ex: if true request content sample format is { rows:[]} - else the format is []. - :type wrap_request_json_in_an_object: object + :param http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. + :type http_compression_type: object """ _validation = { @@ -25562,8 +26589,7 @@ class RestSink(CopySink): 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'compression_type': {'key': 'compressionType', 'type': 'object'}, - 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, } def __init__( @@ -25576,8 +26602,7 @@ def __init__( self.additional_headers = kwargs.get('additional_headers', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) self.request_interval = kwargs.get('request_interval', None) - self.compression_type = kwargs.get('compression_type', None) - self.wrap_request_json_in_an_object = kwargs.get('wrap_request_json_in_an_object', None) + self.http_compression_type = kwargs.get('http_compression_type', None) class RestSource(CopySource): @@ -27248,7 +28273,7 @@ class SapHanaSource(TabularSource): :type packet_size: object :param partition_option: The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SapHanaPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP HANA source partitioning. :type partition_settings: ~data_factory_management_client.models.SapHanaPartitionSettings @@ -27268,7 +28293,7 @@ class SapHanaSource(TabularSource): 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, } @@ -27858,7 +28883,7 @@ class SapTableSource(TabularSource): :param partition_option: The partition mechanism that will be used for SAP table read in parallel. 
Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". - :type partition_option: str or ~data_factory_management_client.models.SapTablePartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP table source partitioning. :type partition_settings: ~data_factory_management_client.models.SapTablePartitionSettings @@ -27883,7 +28908,7 @@ class SapTableSource(TabularSource): 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } @@ -29861,7 +30886,7 @@ def __init__( self.query = kwargs.get('query', None) -class SQLDWSink(CopySink): +class SqlDwSink(CopySink): """A copy activity SQL Data Warehouse sink. All required parameters must be populated in order to send to Azure. @@ -29899,7 +30924,7 @@ class SQLDWSink(CopySink): :type allow_copy_command: object :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. - :type copy_command_settings: ~data_factory_management_client.models.DWCopyCommandSettings + :type copy_command_settings: ~data_factory_management_client.models.DwCopyCommandSettings :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object @@ -29921,7 +30946,7 @@ class SQLDWSink(CopySink): 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, - 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DWCopyCommandSettings'}, + 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, } @@ -29929,7 +30954,7 @@ def __init__( self, **kwargs ): - super(SQLDWSink, self).__init__(**kwargs) + super(SqlDwSink, self).__init__(**kwargs) self.type = 'SqlDWSink' # type: str self.pre_copy_script = kwargs.get('pre_copy_script', None) self.allow_poly_base = kwargs.get('allow_poly_base', None) @@ -29939,7 +30964,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SQLDWSource(TabularSource): +class SqlDwSource(TabularSource): """A copy activity SQL Data Warehouse source. All required parameters must be populated in order to send to Azure. @@ -29977,9 +31002,9 @@ class SQLDWSource(TabularSource): :type stored_procedure_parameters: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -29997,15 +31022,15 @@ class SQLDWSource(TabularSource): 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( self, **kwargs ): - super(SQLDWSource, self).__init__(**kwargs) + super(SqlDwSource, self).__init__(**kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) @@ -30014,7 +31039,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class SQLMiSink(CopySink): +class SqlMiSink(CopySink): """A copy activity Azure SQL Managed Instance sink. All required parameters must be populated in order to send to Azure. @@ -30083,7 +31108,7 @@ def __init__( self, **kwargs ): - super(SQLMiSink, self).__init__(**kwargs) + super(SqlMiSink, self).__init__(**kwargs) self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) @@ -30093,7 +31118,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SQLMiSource(TabularSource): +class SqlMiSource(TabularSource): """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. @@ -30132,9 +31157,9 @@ class SQLMiSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -30153,15 +31178,15 @@ class SQLMiSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( self, **kwargs ): - super(SQLMiSource, self).__init__(**kwargs) + super(SqlMiSource, self).__init__(**kwargs) self.type = 'SqlMISource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) @@ -30171,7 +31196,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class SQLPartitionSettings(msrest.serialization.Model): +class SqlPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Sql source partitioning. :param partition_column_name: The name of the column in integer or datetime type that will be @@ -30200,13 +31225,13 @@ def __init__( self, **kwargs ): - super(SQLPartitionSettings, self).__init__(**kwargs) + super(SqlPartitionSettings, self).__init__(**kwargs) self.partition_column_name = kwargs.get('partition_column_name', None) self.partition_upper_bound = kwargs.get('partition_upper_bound', None) self.partition_lower_bound = kwargs.get('partition_lower_bound', None) -class SQLServerLinkedService(LinkedService): +class SqlServerLinkedService(LinkedService): """SQL Server linked service. All required parameters must be populated in order to send to Azure. @@ -30260,7 +31285,7 @@ def __init__( self, **kwargs ): - super(SQLServerLinkedService, self).__init__(**kwargs) + super(SqlServerLinkedService, self).__init__(**kwargs) self.type = 'SqlServer' # type: str self.connection_string = kwargs['connection_string'] self.user_name = kwargs.get('user_name', None) @@ -30268,7 +31293,7 @@ def __init__( self.encrypted_credential = kwargs.get('encrypted_credential', None) -class SQLServerSink(CopySink): +class SqlServerSink(CopySink): """A copy activity SQL server sink. All required parameters must be populated in order to send to Azure. @@ -30337,7 +31362,7 @@ def __init__( self, **kwargs ): - super(SQLServerSink, self).__init__(**kwargs) + super(SqlServerSink, self).__init__(**kwargs) self.type = 'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) @@ -30347,7 +31372,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SQLServerSource(TabularSource): +class SqlServerSource(TabularSource): """A copy activity SQL server source. All required parameters must be populated in order to send to Azure. @@ -30386,9 +31411,9 @@ class SQLServerSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. 
Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -30407,15 +31432,15 @@ class SQLServerSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( self, **kwargs ): - super(SQLServerSource, self).__init__(**kwargs) + super(SqlServerSource, self).__init__(**kwargs) self.type = 'SqlServerSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) @@ -30425,7 +31450,7 @@ def __init__( self.partition_settings = kwargs.get('partition_settings', None) -class SQLServerStoredProcedureActivity(ExecutionActivity): +class SqlServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. All required parameters must be populated in order to send to Azure. @@ -30479,13 +31504,13 @@ def __init__( self, **kwargs ): - super(SQLServerStoredProcedureActivity, self).__init__(**kwargs) + super(SqlServerStoredProcedureActivity, self).__init__(**kwargs) self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = kwargs['stored_procedure_name'] self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) -class SQLServerTableDataset(Dataset): +class SqlServerTableDataset(Dataset): """The on-premises SQL Server dataset. All required parameters must be populated in order to send to Azure. @@ -30547,14 +31572,14 @@ def __init__( self, **kwargs ): - super(SQLServerTableDataset, self).__init__(**kwargs) + super(SqlServerTableDataset, self).__init__(**kwargs) self.type = 'SqlServerTable' # type: str self.table_name = kwargs.get('table_name', None) self.schema_type_properties_schema = kwargs.get('schema_type_properties_schema', None) self.table = kwargs.get('table', None) -class SQLSink(CopySink): +class SqlSink(CopySink): """A copy activity SQL sink. All required parameters must be populated in order to send to Azure. @@ -30623,7 +31648,7 @@ def __init__( self, **kwargs ): - super(SQLSink, self).__init__(**kwargs) + super(SqlSink, self).__init__(**kwargs) self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = kwargs.get('sql_writer_stored_procedure_name', None) self.sql_writer_table_type = kwargs.get('sql_writer_table_type', None) @@ -30633,7 +31658,7 @@ def __init__( self.table_option = kwargs.get('table_option', None) -class SQLSource(TabularSource): +class SqlSource(TabularSource): """A copy activity SQL source. All required parameters must be populated in order to send to Azure. 
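Editor's note (not part of the diff): the SQL* → Sql* renames above change only the Python class names; the serialized type discriminators and the new SqlPartitionSettings shape are untouched. A minimal, hedged sketch of how a renamed source and the partition settings would be combined — the import path follows this extension's vendored SDK layout, and the query, column, and bound values are made-up placeholders:

from azext_datafactory.vendored_sdks.datafactory import models

# Range-partitioned read; column name and bounds are illustrative only.
partition_settings = models.SqlPartitionSettings(
    partition_column_name="Id",
    partition_lower_bound="1",
    partition_upper_bound="1000000",
)

source = models.SqlServerSource(
    sql_reader_query="SELECT * FROM dbo.Orders",
    partition_option="DynamicRange",   # "None" | "PhysicalPartitionsOfTable" | "DynamicRange"
    partition_settings=partition_settings,
)

print(source.type)  # "SqlServerSource" -- the wire discriminator keeps its original name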
@@ -30674,9 +31699,9 @@ class SQLSource(TabularSource): :type isolation_level: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -30695,15 +31720,15 @@ class SQLSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( self, **kwargs ): - super(SQLSource, self).__init__(**kwargs) + super(SqlSource, self).__init__(**kwargs) self.type = 'SqlSource' # type: str self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) @@ -31999,6 +33024,74 @@ def __init__( self.type_conversion_settings = kwargs.get('type_conversion_settings', None) +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TarGZipReadSettings, self).__init__(**kwargs) + self.type = 'TarGZipReadSettings' # type: str + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). 
+ :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(TarReadSettings, self).__init__(**kwargs) + self.type = 'TarReadSettings' # type: str + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -32131,7 +33224,7 @@ class TeradataSource(TabularSource): :type query: object :param partition_option: The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.TeradataPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for teradata source partitioning. :type partition_settings: ~data_factory_management_client.models.TeradataPartitionSettings @@ -32150,7 +33243,7 @@ class TeradataSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, } @@ -32688,7 +33781,7 @@ class TumblingWindowTrigger(Trigger): trigger window that is ready. :type pipeline: ~data_factory_management_client.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: - "Minute", "Hour". + "Minute", "Hour", "Month". :type frequency: str or ~data_factory_management_client.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. @@ -33441,17 +34534,24 @@ class WebActivityAuthentication(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Web activity authentication (Basic/ClientCertificate/MSI). + :param type: Required. Web activity authentication + (Basic/ClientCertificate/MSI/ServicePrincipal). :type type: str - :param pfx: Base64-encoded contents of a PFX file. + :param pfx: Base64-encoded contents of a PFX file or Certificate when used for + ServicePrincipal. :type pfx: ~data_factory_management_client.models.SecretBase - :param username: Web activity authentication user name for basic authentication. + :param username: Web activity authentication user name for basic authentication or ClientID + when used for ServicePrincipal. :type username: str - :param password: Password for the PFX file or basic authentication. + :param password: Password for the PFX file or basic authentication / Secret when used for + ServicePrincipal. :type password: ~data_factory_management_client.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI Authentication. :type resource: str + :param user_tenant: TenantId for which Azure Auth token will be requested when using + ServicePrincipal Authentication. 
Type: string (or Expression with resultType string). + :type user_tenant: object """ _validation = { @@ -33464,6 +34564,7 @@ class WebActivityAuthentication(msrest.serialization.Model): 'username': {'key': 'username', 'type': 'str'}, 'password': {'key': 'password', 'type': 'SecretBase'}, 'resource': {'key': 'resource', 'type': 'str'}, + 'user_tenant': {'key': 'userTenant', 'type': 'object'}, } def __init__( @@ -33476,6 +34577,7 @@ def __init__( self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.resource = kwargs.get('resource', None) + self.user_tenant = kwargs.get('user_tenant', None) class WebLinkedServiceTypeProperties(msrest.serialization.Model): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py index 060634eb408..f7c444c39b9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py @@ -378,7 +378,7 @@ class LinkedService(msrest.serialization.Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDBLinkedService, AzureMySQLLinkedService, AzurePostgreSQLLinkedService, AzureSearchLinkedService, AzureSQLDWLinkedService, AzureSQLDatabaseLinkedService, AzureSQLMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDBLinkedService, CosmosDBMongoDBApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDBLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDBLinkedService, MongoDBV2LinkedService, MySQLLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSQLLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, 
SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SQLServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. + sub-classes are: AmazonMwsLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, AzureBatchLinkedService, AzureBlobFsLinkedService, AzureBlobStorageLinkedService, AzureDataExplorerLinkedService, AzureDataLakeAnalyticsLinkedService, AzureDataLakeStoreLinkedService, AzureDatabricksLinkedService, AzureDatabricksDeltaLakeLinkedService, AzureFileStorageLinkedService, AzureFunctionLinkedService, AzureKeyVaultLinkedService, AzureMlLinkedService, AzureMlServiceLinkedService, AzureMariaDbLinkedService, AzureMySqlLinkedService, AzurePostgreSqlLinkedService, AzureSearchLinkedService, AzureSqlDwLinkedService, AzureSqlDatabaseLinkedService, AzureSqlMiLinkedService, AzureStorageLinkedService, AzureTableStorageLinkedService, CassandraLinkedService, CommonDataServiceForAppsLinkedService, ConcurLinkedService, CosmosDbLinkedService, CosmosDbMongoDbApiLinkedService, CouchbaseLinkedService, CustomDataSourceLinkedService, Db2LinkedService, DrillLinkedService, DynamicsLinkedService, DynamicsAxLinkedService, DynamicsCrmLinkedService, EloquaLinkedService, FileServerLinkedService, FtpServerLinkedService, GoogleAdWordsLinkedService, GoogleBigQueryLinkedService, GoogleCloudStorageLinkedService, GreenplumLinkedService, HBaseLinkedService, HdInsightLinkedService, HdInsightOnDemandLinkedService, HdfsLinkedService, HiveLinkedService, HttpLinkedService, HubspotLinkedService, ImpalaLinkedService, InformixLinkedService, JiraLinkedService, MagentoLinkedService, MariaDbLinkedService, MarketoLinkedService, MicrosoftAccessLinkedService, MongoDbLinkedService, MongoDbAtlasLinkedService, MongoDbV2LinkedService, MySqlLinkedService, NetezzaLinkedService, ODataLinkedService, OdbcLinkedService, Office365LinkedService, OracleLinkedService, OracleServiceCloudLinkedService, PaypalLinkedService, PhoenixLinkedService, PostgreSqlLinkedService, PrestoLinkedService, QuickBooksLinkedService, ResponsysLinkedService, RestServiceLinkedService, SalesforceLinkedService, SalesforceMarketingCloudLinkedService, SalesforceServiceCloudLinkedService, SapBwLinkedService, SapCloudForCustomerLinkedService, SapEccLinkedService, SapHanaLinkedService, SapOpenHubLinkedService, SapTableLinkedService, ServiceNowLinkedService, SftpServerLinkedService, SharePointOnlineListLinkedService, ShopifyLinkedService, SnowflakeLinkedService, SparkLinkedService, SqlServerLinkedService, SquareLinkedService, SybaseLinkedService, TeradataLinkedService, VerticaLinkedService, WebLinkedService, XeroLinkedService, ZohoLinkedService. All required parameters must be populated in order to send to Azure. 
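Editor's note (not part of the diff): because only the Python-side class names change, polymorphic deserialization is unaffected — msrest still resolves the concrete class from the 'type' discriminator through the _subtype_map updated in the next hunk. A hedged sketch, assuming msrest's standard Model.deserialize behavior and the vendored models path used elsewhere in this extension; the payload values are made up:

from azext_datafactory.vendored_sdks.datafactory import models

payload = {
    "type": "SqlServer",  # wire discriminator, unchanged by the rename
    "typeProperties": {
        "connectionString": "Server=myserver;Database=mydb;Integrated Security=True;",
    },
}

linked_service = models.LinkedService.deserialize(payload)
print(type(linked_service).__name__)  # SqlServerLinkedService (was SQLServerLinkedService)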
@@ -411,7 +411,7 @@ class LinkedService(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'AzureMySql': 'AzureMySQLLinkedService', 'AzurePostgreSql': 'AzurePostgreSQLLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSQLDWLinkedService', 'AzureSqlDatabase': 'AzureSQLDatabaseLinkedService', 'AzureSqlMI': 'AzureSQLMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDBLinkedService', 'CosmosDbMongoDbApi': 'CosmosDBMongoDBApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDBLinkedService', 'MongoDbV2': 'MongoDBV2LinkedService', 'MySql': 'MySQLLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSQLLinkedService', 'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 
'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SQLServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} + 'type': {'AmazonMWS': 'AmazonMwsLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureBlobFS': 'AzureBlobFsLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureML': 'AzureMlLinkedService', 'AzureMLService': 'AzureMlServiceLinkedService', 'AzureMariaDB': 'AzureMariaDbLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'AzureSqlDW': 'AzureSqlDwLinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'AzureSqlMI': 'AzureSqlMiLinkedService', 'AzureStorage': 'AzureStorageLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'Cassandra': 'CassandraLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'Concur': 'ConcurLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'Db2': 'Db2LinkedService', 'Drill': 'DrillLinkedService', 'Dynamics': 'DynamicsLinkedService', 'DynamicsAX': 'DynamicsAxLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Eloqua': 'EloquaLinkedService', 'FileServer': 'FileServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'Greenplum': 'GreenplumLinkedService', 'HBase': 'HBaseLinkedService', 'HDInsight': 'HdInsightLinkedService', 'HDInsightOnDemand': 'HdInsightOnDemandLinkedService', 'Hdfs': 'HdfsLinkedService', 'Hive': 'HiveLinkedService', 'HttpServer': 'HttpLinkedService', 'Hubspot': 'HubspotLinkedService', 'Impala': 'ImpalaLinkedService', 'Informix': 'InformixLinkedService', 'Jira': 'JiraLinkedService', 'Magento': 'MagentoLinkedService', 'MariaDB': 'MariaDbLinkedService', 'Marketo': 'MarketoLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'MongoDb': 'MongoDbLinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MySql': 'MySqlLinkedService', 'Netezza': 'NetezzaLinkedService', 'OData': 'ODataLinkedService', 'Odbc': 'OdbcLinkedService', 'Office365': 'Office365LinkedService', 'Oracle': 'OracleLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'Paypal': 'PaypalLinkedService', 'Phoenix': 'PhoenixLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 
'Presto': 'PrestoLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Responsys': 'ResponsysLinkedService', 'RestService': 'RestServiceLinkedService', 'Salesforce': 'SalesforceLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'SapBW': 'SapBwLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapTable': 'SapTableLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'Sftp': 'SftpServerLinkedService', 'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Shopify': 'ShopifyLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'Spark': 'SparkLinkedService', 'SqlServer': 'SqlServerLinkedService', 'Square': 'SquareLinkedService', 'Sybase': 'SybaseLinkedService', 'Teradata': 'TeradataLinkedService', 'Vertica': 'VerticaLinkedService', 'Web': 'WebLinkedService', 'Xero': 'XeroLinkedService', 'Zoho': 'ZohoLinkedService'} } def __init__( @@ -547,7 +547,7 @@ class Dataset(msrest.serialization.Model): """The Azure Data Factory nested object which identifies data within different data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureMariaDBTableDataset, AzureMySQLTableDataset, AzurePostgreSQLTableDataset, AzureSearchIndexDataset, AzureSQLDWTableDataset, AzureSQLMiTableDataset, AzureSQLTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDBMongoDBApiCollectionDataset, CosmosDBSQLApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDBCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDBTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDBCollectionDataset, MongoDBV2CollectionDataset, MySQLTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSQLTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SQLServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. 
+ sub-classes are: AmazonMwsObjectDataset, AmazonRedshiftTableDataset, AmazonS3Dataset, AvroDataset, AzureBlobDataset, AzureBlobFsDataset, AzureDataExplorerTableDataset, AzureDataLakeStoreDataset, AzureDatabricksDeltaLakeDataset, AzureMariaDbTableDataset, AzureMySqlTableDataset, AzurePostgreSqlTableDataset, AzureSearchIndexDataset, AzureSqlDwTableDataset, AzureSqlMiTableDataset, AzureSqlTableDataset, AzureTableDataset, BinaryDataset, CassandraTableDataset, CommonDataServiceForAppsEntityDataset, ConcurObjectDataset, CosmosDbMongoDbApiCollectionDataset, CosmosDbSqlApiCollectionDataset, CouchbaseTableDataset, CustomDataset, Db2TableDataset, DelimitedTextDataset, DocumentDbCollectionDataset, DrillTableDataset, DynamicsAxResourceDataset, DynamicsCrmEntityDataset, DynamicsEntityDataset, EloquaObjectDataset, ExcelDataset, FileShareDataset, GoogleAdWordsObjectDataset, GoogleBigQueryObjectDataset, GreenplumTableDataset, HBaseObjectDataset, HiveObjectDataset, HttpDataset, HubspotObjectDataset, ImpalaObjectDataset, InformixTableDataset, JiraObjectDataset, JsonDataset, MagentoObjectDataset, MariaDbTableDataset, MarketoObjectDataset, MicrosoftAccessTableDataset, MongoDbAtlasCollectionDataset, MongoDbCollectionDataset, MongoDbV2CollectionDataset, MySqlTableDataset, NetezzaTableDataset, ODataResourceDataset, OdbcTableDataset, Office365Dataset, OracleServiceCloudObjectDataset, OracleTableDataset, OrcDataset, ParquetDataset, PaypalObjectDataset, PhoenixObjectDataset, PostgreSqlTableDataset, PrestoObjectDataset, QuickBooksObjectDataset, RelationalTableDataset, ResponsysObjectDataset, RestResourceDataset, SalesforceMarketingCloudObjectDataset, SalesforceObjectDataset, SalesforceServiceCloudObjectDataset, SapBwCubeDataset, SapCloudForCustomerResourceDataset, SapEccResourceDataset, SapHanaTableDataset, SapOpenHubTableDataset, SapTableResourceDataset, ServiceNowObjectDataset, SharePointOnlineListResourceDataset, ShopifyObjectDataset, SnowflakeDataset, SparkObjectDataset, SqlServerTableDataset, SquareObjectDataset, SybaseTableDataset, TeradataTableDataset, VerticaTableDataset, WebTableDataset, XeroObjectDataset, XmlDataset, ZohoObjectDataset. All required parameters must be populated in order to send to Azure. 
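Editor's note (not part of the diff): the dataset hierarchy follows the same pattern — new Python names (SqlServerTableDataset, MongoDbAtlasCollectionDataset, AzureDatabricksDeltaLakeDataset, ...) with unchanged discriminators. A hedged construction sketch; the linked service name and table values are placeholders, and it assumes LinkedServiceReference accepts a reference_name keyword as in the public azure-mgmt-datafactory models:

from azext_datafactory.vendored_sdks.datafactory import models

dataset = models.SqlServerTableDataset(
    linked_service_name=models.LinkedServiceReference(reference_name="SqlServerLinkedService"),
    schema_type_properties_schema="dbo",  # table schema
    table="Orders",                        # table name
)

print(dataset.type)  # "SqlServerTable" -- dataset discriminators are unchanged as well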
@@ -593,7 +593,7 @@ class Dataset(msrest.serialization.Model): } _subtype_map = { - 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'AzureMySqlTable': 'AzureMySQLTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSQLTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSQLDWTableDataset', 'AzureSqlMITable': 'AzureSQLMiTableDataset', 'AzureSqlTable': 'AzureSQLTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDBMongoDBApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDBSQLApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDBCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbCollection': 'MongoDBCollectionDataset', 'MongoDbV2Collection': 'MongoDBV2CollectionDataset', 'MySqlTable': 'MySQLTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSQLTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 
'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SQLServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} + 'type': {'AmazonMWSObject': 'AmazonMwsObjectDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AmazonS3Object': 'AmazonS3Dataset', 'Avro': 'AvroDataset', 'AzureBlob': 'AzureBlobDataset', 'AzureBlobFSFile': 'AzureBlobFsDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'AzureMariaDBTable': 'AzureMariaDbTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'AzureSqlDWTable': 'AzureSqlDwTableDataset', 'AzureSqlMITable': 'AzureSqlMiTableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'Binary': 'BinaryDataset', 'CassandraTable': 'CassandraTableDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'ConcurObject': 'ConcurObjectDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'CustomDataset': 'CustomDataset', 'Db2Table': 'Db2TableDataset', 'DelimitedText': 'DelimitedTextDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'DrillTable': 'DrillTableDataset', 'DynamicsAXResource': 'DynamicsAxResourceDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'EloquaObject': 'EloquaObjectDataset', 'Excel': 'ExcelDataset', 'FileShare': 'FileShareDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'HBaseObject': 'HBaseObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HttpFile': 'HttpDataset', 'HubspotObject': 'HubspotObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'InformixTable': 'InformixTableDataset', 'JiraObject': 'JiraObjectDataset', 'Json': 'JsonDataset', 'MagentoObject': 'MagentoObjectDataset', 'MariaDBTable': 'MariaDbTableDataset', 'MarketoObject': 'MarketoObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollection': 'MongoDbAtlasCollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MySqlTable': 'MySqlTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ODataResource': 'ODataResourceDataset', 'OdbcTable': 'OdbcTableDataset', 'Office365Table': 'Office365Dataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'OracleTable': 'OracleTableDataset', 'Orc': 'OrcDataset', 'Parquet': 'ParquetDataset', 'PaypalObject': 'PaypalObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'PrestoObject': 'PrestoObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'RelationalTable': 'RelationalTableDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'RestResource': 'RestResourceDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SapBwCube': 'SapBwCubeDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'SnowflakeTable': 'SnowflakeDataset', 'SparkObject': 'SparkObjectDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SquareObject': 'SquareObjectDataset', 'SybaseTable': 'SybaseTableDataset', 'TeradataTable': 'TeradataTableDataset', 'VerticaTable': 'VerticaTableDataset', 'WebTable': 'WebTableDataset', 'XeroObject': 'XeroObjectDataset', 'Xml': 'XmlDataset', 'ZohoObject': 'ZohoObjectDataset'} } def __init__( @@ -693,7 +693,7 @@ class CopySource(msrest.serialization.Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDBMongoDBApiSource, CosmosDBSQLApiSource, DelimitedTextSource, DocumentDBCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDBSource, MongoDBV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. + sub-classes are: AvroSource, AzureBlobFsSource, AzureDataExplorerSource, AzureDataLakeStoreSource, AzureDatabricksDeltaLakeSource, BinarySource, BlobSource, CommonDataServiceForAppsSource, CosmosDbMongoDbApiSource, CosmosDbSqlApiSource, DelimitedTextSource, DocumentDbCollectionSource, DynamicsCrmSource, DynamicsSource, ExcelSource, FileSystemSource, HdfsSource, HttpSource, JsonSource, MicrosoftAccessSource, MongoDbAtlasSource, MongoDbSource, MongoDbV2Source, ODataSource, Office365Source, OracleSource, OrcSource, ParquetSource, RelationalSource, RestSource, SalesforceServiceCloudSource, SharePointOnlineListSource, SnowflakeSource, TabularSource, WebSource, XmlSource. All required parameters must be populated in order to send to Azure. 
@@ -726,7 +726,7 @@ class CopySource(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDBMongoDBApiSource', 'CosmosDbSqlApiSource': 'CosmosDBSQLApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDBCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbSource': 'MongoDBSource', 'MongoDbV2Source': 'MongoDBV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} + 'type': {'AvroSource': 'AvroSource', 'AzureBlobFSSource': 'AzureBlobFsSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'BinarySource': 'BinarySource', 'BlobSource': 'BlobSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DelimitedTextSource': 'DelimitedTextSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'ExcelSource': 'ExcelSource', 'FileSystemSource': 'FileSystemSource', 'HdfsSource': 'HdfsSource', 'HttpSource': 'HttpSource', 'JsonSource': 'JsonSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'MongoDbAtlasSource': 'MongoDbAtlasSource', 'MongoDbSource': 'MongoDbSource', 'MongoDbV2Source': 'MongoDbV2Source', 'ODataSource': 'ODataSource', 'Office365Source': 'Office365Source', 'OracleSource': 'OracleSource', 'OrcSource': 'OrcSource', 'ParquetSource': 'ParquetSource', 'RelationalSource': 'RelationalSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'TabularSource': 'TabularSource', 'WebSource': 'WebSource', 'XmlSource': 'XmlSource'} } def __init__( @@ -750,7 +750,7 @@ class TabularSource(CopySource): """Copy activity sources of tabular type. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDBSource, AzureMySQLSource, AzurePostgreSQLSource, AzureSQLSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDBSource, MarketoSource, MySQLSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSQLSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SQLDWSource, SQLMiSource, SQLServerSource, SQLSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. + sub-classes are: AmazonMwsSource, AmazonRedshiftSource, AzureMariaDbSource, AzureMySqlSource, AzurePostgreSqlSource, AzureSqlSource, AzureTableSource, CassandraSource, ConcurSource, CouchbaseSource, Db2Source, DrillSource, DynamicsAxSource, EloquaSource, GoogleAdWordsSource, GoogleBigQuerySource, GreenplumSource, HBaseSource, HiveSource, HubspotSource, ImpalaSource, InformixSource, JiraSource, MagentoSource, MariaDbSource, MarketoSource, MySqlSource, NetezzaSource, OdbcSource, OracleServiceCloudSource, PaypalSource, PhoenixSource, PostgreSqlSource, PrestoSource, QuickBooksSource, ResponsysSource, SalesforceMarketingCloudSource, SalesforceSource, SapBwSource, SapCloudForCustomerSource, SapEccSource, SapHanaSource, SapOpenHubSource, SapTableSource, ServiceNowSource, ShopifySource, SparkSource, SqlDwSource, SqlMiSource, SqlServerSource, SqlSource, SquareSource, SybaseSource, TeradataSource, VerticaSource, XeroSource, ZohoSource. All required parameters must be populated in order to send to Azure. 
@@ -791,7 +791,7 @@ class TabularSource(CopySource): } _subtype_map = { - 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'AzureMySqlSource': 'AzureMySQLSource', 'AzurePostgreSqlSource': 'AzurePostgreSQLSource', 'AzureSqlSource': 'AzureSQLSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDBSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySQLSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSQLSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SQLDWSource', 'SqlMISource': 'SQLMiSource', 'SqlServerSource': 'SQLServerSource', 'SqlSource': 'SQLSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} + 'type': {'AmazonMWSSource': 'AmazonMwsSource', 'AmazonRedshiftSource': 'AmazonRedshiftSource', 'AzureMariaDBSource': 'AzureMariaDbSource', 'AzureMySqlSource': 'AzureMySqlSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AzureSqlSource': 'AzureSqlSource', 'AzureTableSource': 'AzureTableSource', 'CassandraSource': 'CassandraSource', 'ConcurSource': 'ConcurSource', 'CouchbaseSource': 'CouchbaseSource', 'Db2Source': 'Db2Source', 'DrillSource': 'DrillSource', 'DynamicsAXSource': 'DynamicsAxSource', 'EloquaSource': 'EloquaSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'GreenplumSource': 'GreenplumSource', 'HBaseSource': 'HBaseSource', 'HiveSource': 'HiveSource', 'HubspotSource': 'HubspotSource', 'ImpalaSource': 'ImpalaSource', 'InformixSource': 'InformixSource', 'JiraSource': 'JiraSource', 'MagentoSource': 'MagentoSource', 'MariaDBSource': 'MariaDbSource', 'MarketoSource': 'MarketoSource', 'MySqlSource': 'MySqlSource', 'NetezzaSource': 'NetezzaSource', 'OdbcSource': 'OdbcSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'PaypalSource': 'PaypalSource', 'PhoenixSource': 'PhoenixSource', 'PostgreSqlSource': 'PostgreSqlSource', 'PrestoSource': 'PrestoSource', 'QuickBooksSource': 'QuickBooksSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 
'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SapEccSource': 'SapEccSource', 'SapHanaSource': 'SapHanaSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapTableSource': 'SapTableSource', 'ServiceNowSource': 'ServiceNowSource', 'ShopifySource': 'ShopifySource', 'SparkSource': 'SparkSource', 'SqlDWSource': 'SqlDwSource', 'SqlMISource': 'SqlMiSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SquareSource': 'SquareSource', 'SybaseSource': 'SybaseSource', 'TeradataSource': 'TeradataSource', 'VerticaSource': 'VerticaSource', 'XeroSource': 'XeroSource', 'ZohoSource': 'ZohoSource'} } def __init__( @@ -1235,6 +1235,9 @@ class AmazonS3LinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param authentication_type: The authentication type of S3. Allowed value: AccessKey (default) + or TemporarySecurityCredentials. Type: string (or Expression with resultType string). + :type authentication_type: object :param access_key_id: The access key identifier of the Amazon S3 Identity and Access Management (IAM) user. Type: string (or Expression with resultType string). :type access_key_id: object @@ -1245,6 +1248,8 @@ class AmazonS3LinkedService(LinkedService): an optional property; change it only if you want to try a different service endpoint or want to switch between https and http. Type: string (or Expression with resultType string). :type service_url: object + :param session_token: The session token for the S3 temporary security credential. + :type session_token: ~data_factory_management_client.models.SecretBase :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
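Editor's note (not part of the diff): the authenticationType and sessionToken properties added to AmazonS3LinkedService above enable access through temporary security credentials instead of a long-lived access key. A hedged sketch with placeholder values, assuming the SDK's usual SecureString SecretBase implementation:

from azext_datafactory.vendored_sdks.datafactory import models

s3_linked_service = models.AmazonS3LinkedService(
    authentication_type="TemporarySecurityCredentials",  # default remains "AccessKey"
    access_key_id="AKIAEXAMPLE",                          # placeholder
    secret_access_key=models.SecureString(value="<secret-access-key>"),
    session_token=models.SecureString(value="<session-token>"),
)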
@@ -1262,9 +1267,11 @@ class AmazonS3LinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'object'}, 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'session_token': {'key': 'typeProperties.sessionToken', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -1276,17 +1283,21 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + authentication_type: Optional[object] = None, access_key_id: Optional[object] = None, secret_access_key: Optional["SecretBase"] = None, service_url: Optional[object] = None, + session_token: Optional["SecretBase"] = None, encrypted_credential: Optional[object] = None, **kwargs ): super(AmazonS3LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AmazonS3' # type: str + self.authentication_type = authentication_type self.access_key_id = access_key_id self.secret_access_key = secret_access_key self.service_url = service_url + self.session_token = session_token self.encrypted_credential = encrypted_credential @@ -1766,7 +1777,7 @@ class CopySink(msrest.serialization.Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureMySQLSink, AzurePostgreSQLSink, AzureQueueSink, AzureSearchIndexSink, AzureSQLSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDBMongoDBApiSink, CosmosDBSQLApiSink, DelimitedTextSink, DocumentDBCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SQLDWSink, SQLMiSink, SQLServerSink, SQLSink. + sub-classes are: AvroSink, AzureBlobFsSink, AzureDataExplorerSink, AzureDataLakeStoreSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, AzurePostgreSqlSink, AzureQueueSink, AzureSearchIndexSink, AzureSqlSink, AzureTableSink, BinarySink, BlobSink, CommonDataServiceForAppsSink, CosmosDbMongoDbApiSink, CosmosDbSqlApiSink, DelimitedTextSink, DocumentDbCollectionSink, DynamicsCrmSink, DynamicsSink, FileSystemSink, InformixSink, JsonSink, MicrosoftAccessSink, OdbcSink, OracleSink, OrcSink, ParquetSink, RestSink, SalesforceServiceCloudSink, SalesforceSink, SapCloudForCustomerSink, SnowflakeSink, SqlDwSink, SqlMiSink, SqlServerSink, SqlSink. All required parameters must be populated in order to send to Azure. 
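Editor's note (not part of the diff): the sink hierarchy mirrors the sources — Python names change (SqlDwSink, SqlMiSink, SqlServerSink, SqlSink) while the serialized discriminators keep their original casing, as the _subtype_map in the next hunk shows. A small hedged sketch; the table_option value is illustrative:

from azext_datafactory.vendored_sdks.datafactory import models

sink = models.SqlMiSink(table_option="autoCreate")
print(sink.type)  # "SqlMISink" -- note the casing difference from the Python class name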
@@ -1807,7 +1818,7 @@ class CopySink(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureMySqlSink': 'AzureMySQLSink', 'AzurePostgreSqlSink': 'AzurePostgreSQLSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSQLSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDBMongoDBApiSink', 'CosmosDbSqlApiSink': 'CosmosDBSQLApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDBCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SQLDWSink', 'SqlMISink': 'SQLMiSink', 'SqlServerSink': 'SQLServerSink', 'SqlSink': 'SQLSink'} + 'type': {'AvroSink': 'AvroSink', 'AzureBlobFSSink': 'AzureBlobFsSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'AzureQueueSink': 'AzureQueueSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureSqlSink': 'AzureSqlSink', 'AzureTableSink': 'AzureTableSink', 'BinarySink': 'BinarySink', 'BlobSink': 'BlobSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DelimitedTextSink': 'DelimitedTextSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'FileSystemSink': 'FileSystemSink', 'InformixSink': 'InformixSink', 'JsonSink': 'JsonSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'OdbcSink': 'OdbcSink', 'OracleSink': 'OracleSink', 'OrcSink': 'OrcSink', 'ParquetSink': 'ParquetSink', 'RestSink': 'RestSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDwSink', 'SqlMISink': 'SqlMiSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink'} } def __init__( @@ -1958,7 +1969,7 @@ class FormatWriteSettings(msrest.serialization.Model): """Format write settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings. + sub-classes are: AvroWriteSettings, DelimitedTextWriteSettings, JsonWriteSettings, OrcWriteSettings, ParquetWriteSettings. All required parameters must be populated in order to send to Azure. 
@@ -1979,7 +1990,7 @@ class FormatWriteSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings'} + 'type': {'AvroWriteSettings': 'AvroWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'JsonWriteSettings': 'JsonWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__( @@ -2007,6 +2018,13 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -2018,6 +2036,8 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -2026,12 +2046,16 @@ def __init__( additional_properties: Optional[Dict[str, object]] = None, record_name: Optional[str] = None, record_namespace: Optional[str] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, **kwargs ): super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'AvroWriteSettings' # type: str self.record_name = record_name self.record_namespace = record_namespace + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class CustomSetupBase(msrest.serialization.Model): @@ -3119,6 +3143,444 @@ def __init__( self.block_size_in_mb = block_size_in_mb +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. 
If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param table: The name of delta table. Type: string (or Expression with resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or Expression with resultType + string). + :type database: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table: Optional[object] = None, + database: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'AzureDatabricksDeltaLakeDataset' # type: str + self.table = table + self.database = database + + +class ExportSettings(msrest.serialization.Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeExportCommand, SnowflakeExportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand', 'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'ExportSettings' # type: str + + +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The export setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for the csv in Azure Databricks Delta Lake Copy. + Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure Databricks Delta + Lake Copy. Type: string (or Expression with resultType string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + date_format: Optional[object] = None, + timestamp_format: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AzureDatabricksDeltaLakeExportCommand' # type: str + self.date_format = date_format + self.timestamp_format = timestamp_format + + +class ImportSettings(msrest.serialization.Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, SnowflakeImportCopyCommand. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = 'ImportSettings' # type: str + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The import setting type.Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks Delta Lake Copy. Type: + string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure Databricks Delta Lake + Copy. Type: string (or Expression with resultType string). 
+ :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + date_format: Optional[object] = None, + timestamp_format: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'AzureDatabricksDeltaLakeImportCommand' # type: str + self.date_format = date_format + self.timestamp_format = timestamp_format + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks + deployment. Type: string (or Expression with resultType string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type access_token: ~data_factory_management_client.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be used for all runs of + this job. Type: string (or Expression with resultType string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for authentication. Credentials are + encrypted using the integration runtime credential manager. Type: string (or Expression with + resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__( + self, + *, + domain: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + access_token: Optional["SecretBase"] = None, + cluster_id: Optional[object] = None, + encrypted_credential: Optional[object] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'AzureDatabricksDeltaLake' # type: str + self.domain = domain + self.access_token = access_token + self.cluster_id = cluster_id + self.encrypted_credential = encrypted_credential + + +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy sink type.Constant filled by server. + :type type: str + :param write_batch_size: Write batch size. Type: integer (or Expression with resultType + integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression with resultType + integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the sink data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression with resultType + string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. 
+ :type import_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + write_batch_size: Optional[object] = None, + write_batch_timeout: Optional[object] = None, + sink_retry_count: Optional[object] = None, + sink_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + pre_copy_script: Optional[object] = None, + import_settings: Optional["AzureDatabricksDeltaLakeImportCommand"] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDatabricksDeltaLakeSink' # type: str + self.pre_copy_script = pre_copy_script + self.import_settings = import_settings + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param query: Azure Databricks Delta Lake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: + ~data_factory_management_client.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + query: Optional[object] = None, + export_settings: Optional["AzureDatabricksDeltaLakeExportCommand"] = None, + **kwargs + ): + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'AzureDatabricksDeltaLakeSource' # type: str + self.query = query + self.export_settings = export_settings + + class AzureDatabricksLinkedService(LinkedService): """Azure Databricks linked service. @@ -3140,10 +3602,16 @@ class AzureDatabricksLinkedService(LinkedService): :param domain: Required. :code:``.azuredatabricks.net, domain name of your Databricks deployment. Type: string (or Expression with resultType string). :type domain: object - :param access_token: Required. Access token for databricks REST API. Refer to + :param access_token: Access token for databricks REST API. Refer to https://docs.azuredatabricks.net/api/latest/authentication.html. Type: string (or Expression with resultType string). :type access_token: ~data_factory_management_client.models.SecretBase + :param authentication: Required to specify MSI, if using Workspace resource id for databricks + REST API. Type: string (or Expression with resultType string). + :type authentication: object + :param workspace_resource_id: Workspace resource id for databricks REST API. Type: string (or + Expression with resultType string). + :type workspace_resource_id: object :param existing_cluster_id: The id of an existing interactive cluster that will be used for all runs of this activity. Type: string (or Expression with resultType string). :type existing_cluster_id: object @@ -3193,12 +3661,14 @@ class AzureDatabricksLinkedService(LinkedService): encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param policy_id: The policy id for limiting the ability to configure clusters based on a user + defined set of rules. Type: string (or Expression with resultType string). 
+ :type policy_id: object """ _validation = { 'type': {'required': True}, 'domain': {'required': True}, - 'access_token': {'required': True}, } _attribute_map = { @@ -3210,6 +3680,8 @@ class AzureDatabricksLinkedService(LinkedService): 'annotations': {'key': 'annotations', 'type': '[object]'}, 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'authentication': {'key': 'typeProperties.authentication', 'type': 'object'}, + 'workspace_resource_id': {'key': 'typeProperties.workspaceResourceId', 'type': 'object'}, 'existing_cluster_id': {'key': 'typeProperties.existingClusterId', 'type': 'object'}, 'instance_pool_id': {'key': 'typeProperties.instancePoolId', 'type': 'object'}, 'new_cluster_version': {'key': 'typeProperties.newClusterVersion', 'type': 'object'}, @@ -3223,18 +3695,21 @@ class AzureDatabricksLinkedService(LinkedService): 'new_cluster_init_scripts': {'key': 'typeProperties.newClusterInitScripts', 'type': 'object'}, 'new_cluster_enable_elastic_disk': {'key': 'typeProperties.newClusterEnableElasticDisk', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'policy_id': {'key': 'typeProperties.policyId', 'type': 'object'}, } def __init__( self, *, domain: object, - access_token: "SecretBase", additional_properties: Optional[Dict[str, object]] = None, connect_via: Optional["IntegrationRuntimeReference"] = None, description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + access_token: Optional["SecretBase"] = None, + authentication: Optional[object] = None, + workspace_resource_id: Optional[object] = None, existing_cluster_id: Optional[object] = None, instance_pool_id: Optional[object] = None, new_cluster_version: Optional[object] = None, @@ -3248,12 +3723,15 @@ def __init__( new_cluster_init_scripts: Optional[object] = None, new_cluster_enable_elastic_disk: Optional[object] = None, encrypted_credential: Optional[object] = None, + policy_id: Optional[object] = None, **kwargs ): super(AzureDatabricksLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureDatabricks' # type: str self.domain = domain self.access_token = access_token + self.authentication = authentication + self.workspace_resource_id = workspace_resource_id self.existing_cluster_id = existing_cluster_id self.instance_pool_id = instance_pool_id self.new_cluster_version = new_cluster_version @@ -3267,13 +3745,14 @@ def __init__( self.new_cluster_init_scripts = new_cluster_init_scripts self.new_cluster_enable_elastic_disk = new_cluster_enable_elastic_disk self.encrypted_credential = encrypted_credential + self.policy_id = policy_id class ExecutionActivity(Activity): """Base class for all execution activities. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SQLServerStoredProcedureActivity, WebActivity. + sub-classes are: AzureDataExplorerCommandActivity, AzureFunctionActivity, AzureMlBatchExecutionActivity, AzureMlExecutePipelineActivity, AzureMlUpdateResourceActivity, CopyActivity, CustomActivity, DataLakeAnalyticsUsqlActivity, DatabricksNotebookActivity, DatabricksSparkJarActivity, DatabricksSparkPythonActivity, DeleteActivity, ExecuteDataFlowActivity, ExecuteSsisPackageActivity, GetMetadataActivity, HdInsightHiveActivity, HdInsightMapReduceActivity, HdInsightPigActivity, HdInsightSparkActivity, HdInsightStreamingActivity, LookupActivity, SqlServerStoredProcedureActivity, WebActivity. All required parameters must be populated in order to send to Azure. @@ -3313,7 +3792,7 @@ class ExecutionActivity(Activity): } _subtype_map = { - 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SQLServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} + 'type': {'AzureDataExplorerCommand': 'AzureDataExplorerCommandActivity', 'AzureFunctionActivity': 'AzureFunctionActivity', 'AzureMLBatchExecution': 'AzureMlBatchExecutionActivity', 'AzureMLExecutePipeline': 'AzureMlExecutePipelineActivity', 'AzureMLUpdateResource': 'AzureMlUpdateResourceActivity', 'Copy': 'CopyActivity', 'Custom': 'CustomActivity', 'DataLakeAnalyticsU-SQL': 'DataLakeAnalyticsUsqlActivity', 'DatabricksNotebook': 'DatabricksNotebookActivity', 'DatabricksSparkJar': 'DatabricksSparkJarActivity', 'DatabricksSparkPython': 'DatabricksSparkPythonActivity', 'Delete': 'DeleteActivity', 'ExecuteDataFlow': 'ExecuteDataFlowActivity', 'ExecuteSSISPackage': 'ExecuteSsisPackageActivity', 'GetMetadata': 'GetMetadataActivity', 'HDInsightHive': 'HdInsightHiveActivity', 'HDInsightMapReduce': 'HdInsightMapReduceActivity', 'HDInsightPig': 'HdInsightPigActivity', 'HDInsightSpark': 'HdInsightSparkActivity', 'HDInsightStreaming': 'HdInsightStreamingActivity', 'Lookup': 'LookupActivity', 'SqlServerStoredProcedure': 'SqlServerStoredProcedureActivity', 'WebActivity': 'WebActivity'} } def __init__( @@ -4859,7 
+5338,7 @@ def __init__( self.secret_version = secret_version -class AzureMariaDBLinkedService(LinkedService): +class AzureMariaDbLinkedService(LinkedService): """Azure Database for MariaDB linked service. All required parameters must be populated in order to send to Azure. @@ -4917,14 +5396,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureMariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzureMariaDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureMariaDB' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential -class AzureMariaDBSource(TabularSource): +class AzureMariaDbSource(TabularSource): """A copy activity Azure MariaDB source. All required parameters must be populated in order to send to Azure. @@ -4981,12 +5460,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMariaDBSource' # type: str self.query = query -class AzureMariaDBTableDataset(Dataset): +class AzureMariaDbTableDataset(Dataset): """Azure Database for MariaDB dataset. All required parameters must be populated in order to send to Azure. @@ -5049,7 +5528,7 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(AzureMariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzureMariaDbTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureMariaDBTable' # type: str self.table_name = table_name @@ -5158,9 +5637,15 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference :param policy: Activity policy. :type policy: ~data_factory_management_client.models.ActivityPolicy - :param ml_pipeline_id: Required. ID of the published Azure ML pipeline. Type: string (or - Expression with resultType string). + :param ml_pipeline_id: ID of the published Azure ML pipeline. Type: string (or Expression with + resultType string). :type ml_pipeline_id: object + :param ml_pipeline_endpoint_id: ID of the published Azure ML pipeline endpoint. Type: string + (or Expression with resultType string). + :type ml_pipeline_endpoint_id: object + :param version: Version of the published Azure ML pipeline endpoint. 
Type: string (or + Expression with resultType string). + :type version: object :param experiment_name: Run history experiment name of the pipeline run. This information will be passed in the ExperimentName property of the published pipeline execution request. Type: string (or Expression with resultType string). @@ -5170,6 +5655,10 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): Values will be passed in the ParameterAssignments property of the published pipeline execution request. Type: object with key value pairs (or Expression with resultType object). :type ml_pipeline_parameters: object + :param data_path_assignments: Dictionary used for changing data path assignments without + retraining. Values will be passed in the dataPathAssignments property of the published pipeline + execution request. Type: object with key value pairs (or Expression with resultType object). + :type data_path_assignments: object :param ml_parent_run_id: The parent Azure ML Service pipeline run id. This information will be passed in the ParentRunId property of the published pipeline execution request. Type: string (or Expression with resultType string). @@ -5184,7 +5673,6 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): _validation = { 'name': {'required': True}, 'type': {'required': True}, - 'ml_pipeline_id': {'required': True}, } _attribute_map = { @@ -5197,8 +5685,11 @@ class AzureMlExecutePipelineActivity(ExecutionActivity): 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, 'policy': {'key': 'policy', 'type': 'ActivityPolicy'}, 'ml_pipeline_id': {'key': 'typeProperties.mlPipelineId', 'type': 'object'}, + 'ml_pipeline_endpoint_id': {'key': 'typeProperties.mlPipelineEndpointId', 'type': 'object'}, + 'version': {'key': 'typeProperties.version', 'type': 'object'}, 'experiment_name': {'key': 'typeProperties.experimentName', 'type': 'object'}, 'ml_pipeline_parameters': {'key': 'typeProperties.mlPipelineParameters', 'type': 'object'}, + 'data_path_assignments': {'key': 'typeProperties.dataPathAssignments', 'type': 'object'}, 'ml_parent_run_id': {'key': 'typeProperties.mlParentRunId', 'type': 'object'}, 'continue_on_step_failure': {'key': 'typeProperties.continueOnStepFailure', 'type': 'object'}, } @@ -5207,15 +5698,18 @@ def __init__( self, *, name: str, - ml_pipeline_id: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, depends_on: Optional[List["ActivityDependency"]] = None, user_properties: Optional[List["UserProperty"]] = None, linked_service_name: Optional["LinkedServiceReference"] = None, policy: Optional["ActivityPolicy"] = None, + ml_pipeline_id: Optional[object] = None, + ml_pipeline_endpoint_id: Optional[object] = None, + version: Optional[object] = None, experiment_name: Optional[object] = None, ml_pipeline_parameters: Optional[object] = None, + data_path_assignments: Optional[object] = None, ml_parent_run_id: Optional[object] = None, continue_on_step_failure: Optional[object] = None, **kwargs @@ -5223,8 +5717,11 @@ def __init__( super(AzureMlExecutePipelineActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'AzureMLExecutePipeline' # type: str self.ml_pipeline_id = ml_pipeline_id + self.ml_pipeline_endpoint_id = ml_pipeline_endpoint_id + self.version = version self.experiment_name = experiment_name 
self.ml_pipeline_parameters = ml_pipeline_parameters + self.data_path_assignments = data_path_assignments self.ml_parent_run_id = ml_parent_run_id self.continue_on_step_failure = continue_on_step_failure @@ -5529,7 +6026,7 @@ def __init__( self.linked_service_name = linked_service_name -class AzureMySQLLinkedService(LinkedService): +class AzureMySqlLinkedService(LinkedService): """Azure MySQL database linked service. All required parameters must be populated in order to send to Azure. @@ -5588,14 +6085,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureMySQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzureMySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureMySql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class AzureMySQLSink(CopySink): +class AzureMySqlSink(CopySink): """A copy activity Azure MySql sink. All required parameters must be populated in order to send to Azure. @@ -5652,12 +6149,12 @@ def __init__( pre_copy_script: Optional[object] = None, **kwargs ): - super(AzureMySQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureMySqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzureMySqlSink' # type: str self.pre_copy_script = pre_copy_script -class AzureMySQLSource(TabularSource): +class AzureMySqlSource(TabularSource): """A copy activity Azure MySQL source. All required parameters must be populated in order to send to Azure. @@ -5713,12 +6210,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(AzureMySQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureMySqlSource' # type: str self.query = query -class AzureMySQLTableDataset(Dataset): +class AzureMySqlTableDataset(Dataset): """The Azure MySQL database dataset. All required parameters must be populated in order to send to Azure. 
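Note (example, not part of the diff): with the AzureMlExecutePipelineActivity hunks above, `ml_pipeline_id` is no longer a required constructor argument, so the activity can instead target a published pipeline endpoint via `ml_pipeline_endpoint_id`, `version`, and `data_path_assignments`. A hedged sketch under the same vendored-SDK import assumption; the GUID and data path below are placeholders, not real resources:

    # Sketch only: run a published Azure ML pipeline *endpoint* rather than a fixed pipeline id.
    from azext_datafactory.vendored_sdks.datafactory import models  # assumed import path

    activity = models.AzureMlExecutePipelineActivity(
        name="RunAmlPipelineEndpoint",
        ml_pipeline_endpoint_id="00000000-0000-0000-0000-000000000000",  # placeholder GUID
        version="1",                                                     # endpoint version to run
        experiment_name="copy-validation",
        data_path_assignments={"training_data": "path/on/datastore"},    # illustrative shape only
        continue_on_step_failure=False,
    )
    print(activity.type)            # 'AzureMLExecutePipeline'
    print(activity.ml_pipeline_id)  # None -- no longer required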
@@ -5787,13 +6284,13 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureMySQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureMySqlTable' # type: str self.table_name = table_name self.table = table -class AzurePostgreSQLLinkedService(LinkedService): +class AzurePostgreSqlLinkedService(LinkedService): """Azure PostgreSQL linked service. All required parameters must be populated in order to send to Azure. @@ -5851,14 +6348,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzurePostgreSQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzurePostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzurePostgreSql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class AzurePostgreSQLSink(CopySink): +class AzurePostgreSqlSink(CopySink): """A copy activity Azure PostgreSQL sink. All required parameters must be populated in order to send to Azure. @@ -5915,12 +6412,12 @@ def __init__( pre_copy_script: Optional[object] = None, **kwargs ): - super(AzurePostgreSQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzurePostgreSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzurePostgreSqlSink' # type: str self.pre_copy_script = pre_copy_script -class AzurePostgreSQLSource(TabularSource): +class AzurePostgreSqlSource(TabularSource): """A copy activity Azure PostgreSQL source. All required parameters must be populated in order to send to Azure. 
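Note (example, not part of the diff): the AzureMySQL*/AzurePostgreSQL* renames in this block (and the similar SQL and Cosmos renames further down) only normalize the Python class names; the `self.type` discriminators and serialized payloads are unchanged. A small sketch, with the same assumed import path and a dummy connection string:

    # Sketch only: the Python-side rename does not change the wire format.
    from azext_datafactory.vendored_sdks.datafactory import models  # assumed import path

    ls = models.AzureMySqlLinkedService(
        connection_string="Server=example.mysql.database.azure.com;Database=demo",  # dummy value
    )
    print(ls.type)                 # 'AzureMySql' -- same discriminator as before the rename
    print(ls.serialize()["type"])  # serialized payload keeps the original casing too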
@@ -5977,12 +6474,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(AzurePostgreSQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzurePostgreSqlSource' # type: str self.query = query -class AzurePostgreSQLTableDataset(Dataset): +class AzurePostgreSqlTableDataset(Dataset): """Azure PostgreSQL dataset. All required parameters must be populated in order to send to Azure. @@ -6056,7 +6553,7 @@ def __init__( schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(AzurePostgreSQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzurePostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzurePostgreSqlTable' # type: str self.table_name = table_name self.table = table @@ -6318,7 +6815,7 @@ def __init__( self.encrypted_credential = encrypted_credential -class AzureSQLDatabaseLinkedService(LinkedService): +class AzureSqlDatabaseLinkedService(LinkedService): """Microsoft Azure SQL Database linked service. All required parameters must be populated in order to send to Azure. @@ -6398,7 +6895,7 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureSQLDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureSqlDatabase' # type: str self.connection_string = connection_string self.password = password @@ -6409,7 +6906,7 @@ def __init__( self.encrypted_credential = encrypted_credential -class AzureSQLDWLinkedService(LinkedService): +class AzureSqlDwLinkedService(LinkedService): """Azure SQL Data Warehouse linked service. All required parameters must be populated in order to send to Azure. 
@@ -6489,7 +6986,7 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureSQLDWLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzureSqlDwLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureSqlDW' # type: str self.connection_string = connection_string self.password = password @@ -6500,7 +6997,7 @@ def __init__( self.encrypted_credential = encrypted_credential -class AzureSQLDWTableDataset(Dataset): +class AzureSqlDwTableDataset(Dataset): """The Azure SQL Data Warehouse dataset. All required parameters must be populated in order to send to Azure. @@ -6574,14 +7071,14 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureSQLDWTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzureSqlDwTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureSqlDWTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AzureSQLMiLinkedService(LinkedService): +class AzureSqlMiLinkedService(LinkedService): """Azure SQL Managed Instance linked service. All required parameters must be populated in order to send to Azure. @@ -6661,7 +7158,7 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(AzureSQLMiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(AzureSqlMiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'AzureSqlMI' # type: str self.connection_string = connection_string self.password = password @@ -6672,7 +7169,7 @@ def __init__( self.encrypted_credential = encrypted_credential -class AzureSQLMiTableDataset(Dataset): +class AzureSqlMiTableDataset(Dataset): """The Azure SQL Managed Instance dataset. All required parameters must be populated in order to send to Azure. 
@@ -6746,14 +7243,14 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureSQLMiTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzureSqlMiTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureSqlMITable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class AzureSQLSink(CopySink): +class AzureSqlSink(CopySink): """A copy activity Azure SQL sink. All required parameters must be populated in order to send to Azure. @@ -6835,7 +7332,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(AzureSQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(AzureSqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'AzureSqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -6845,7 +7342,7 @@ def __init__( self.table_option = table_option -class AzureSQLSource(TabularSource): +class AzureSqlSource(TabularSource): """A copy activity Azure SQL source. All required parameters must be populated in order to send to Azure. @@ -6884,9 +7381,9 @@ class AzureSQLSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -6905,8 +7402,8 @@ class AzureSQLSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -6922,11 +7419,11 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, - partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, - partition_settings: Optional["SQLPartitionSettings"] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(AzureSQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'AzureSqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -6936,7 +7433,7 @@ def __init__( self.partition_settings = partition_settings -class AzureSQLTableDataset(Dataset): +class AzureSqlTableDataset(Dataset): """The Azure SQL Server database dataset. All required parameters must be populated in order to send to Azure. @@ -7010,7 +7507,7 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(AzureSQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(AzureSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'AzureSqlTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema @@ -7713,7 +8210,7 @@ class MultiplePipelineTrigger(Trigger): """Base class for all triggers that support one to many model for trigger to pipeline. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + sub-classes are: BlobEventsTrigger, BlobTrigger, CustomEventsTrigger, ScheduleTrigger. 
Variables are only populated by the server, and will be ignored when sending a request. @@ -7750,7 +8247,7 @@ class MultiplePipelineTrigger(Trigger): } _subtype_map = { - 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'CustomEventsTrigger': 'CustomEventsTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( @@ -8446,6 +8943,28 @@ def __init__( self.password = password +class CmkIdentityDefinition(msrest.serialization.Model): + """Managed Identity used for CMK. + + :param user_assigned_identity: The resource id of the user assigned identity to authenticate to + customer's key vault. + :type user_assigned_identity: str + """ + + _attribute_map = { + 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + } + + def __init__( + self, + *, + user_assigned_identity: Optional[str] = None, + **kwargs + ): + super(CmkIdentityDefinition, self).__init__(**kwargs) + self.user_assigned_identity = user_assigned_identity + + class CommonDataServiceForAppsEntityDataset(Dataset): """The Common Data Service for Apps entity dataset. @@ -8828,7 +9347,7 @@ class CompressionReadSettings(msrest.serialization.Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings. + sub-classes are: TarGZipReadSettings, TarReadSettings, ZipDeflateReadSettings. All required parameters must be populated in order to send to Azure. @@ -8849,7 +9368,7 @@ class CompressionReadSettings(msrest.serialization.Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__( @@ -8881,6 +9400,9 @@ class ConcurLinkedService(LinkedService): :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] :param annotations: List of tags that can be used for describing the linked service. :type annotations: list[object] + :param connection_properties: Properties used to connect to Concur. It is mutually exclusive + with any other properties in the linked service. Type: object. + :type connection_properties: object :param client_id: Required. Application client_id supplied by Concur App Management. :type client_id: object :param username: Required. The user name that you use to access Concur Service. 
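Note (example, not part of the diff): in the AzureSqlSource hunks a little further up, `partition_option` is retyped from the `SqlPartitionOption` enum/string to `object`, which allows an ADF expression object in addition to a literal value. A sketch under the same import assumption; the pipeline parameter referenced in the expression is hypothetical:

    # Sketch only: partition_option as a literal string vs. as a runtime expression.
    from azext_datafactory.vendored_sdks.datafactory import models  # assumed import path

    static_source = models.AzureSqlSource(
        sql_reader_query="SELECT * FROM dbo.Orders",
        partition_option="PhysicalPartitionsOfTable",
    )

    dynamic_source = models.AzureSqlSource(
        sql_reader_query="SELECT * FROM dbo.Orders",
        # 'partitionOption' is a hypothetical pipeline parameter, used only for illustration.
        partition_option={"value": "@pipeline().parameters.partitionOption", "type": "Expression"},
    )
    print(dynamic_source.partition_option["type"])  # Expression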
@@ -8917,6 +9439,7 @@ class ConcurLinkedService(LinkedService): 'description': {'key': 'description', 'type': 'str'}, 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_properties': {'key': 'typeProperties.connectionProperties', 'type': 'object'}, 'client_id': {'key': 'typeProperties.clientId', 'type': 'object'}, 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, @@ -8936,6 +9459,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, + connection_properties: Optional[object] = None, password: Optional["SecretBase"] = None, use_encrypted_endpoints: Optional[object] = None, use_host_verification: Optional[object] = None, @@ -8945,6 +9469,7 @@ def __init__( ): super(ConcurLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'Concur' # type: str + self.connection_properties = connection_properties self.client_id = client_id self.username = username self.password = password @@ -9218,9 +9743,11 @@ class CopyActivity(ExecutionActivity): EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~data_factory_management_client.models.RedirectIncompatibleRowSettings - :param log_storage_settings: Log storage settings customer need to provide when enabling - session log. + :param log_storage_settings: (Deprecated. Please use LogSettings) Log storage settings customer + need to provide when enabling session log. :type log_storage_settings: ~data_factory_management_client.models.LogStorageSettings + :param log_settings: Log settings customer needs provide when enabling log. + :type log_settings: ~data_factory_management_client.models.LogSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. 
@@ -9260,6 +9787,7 @@ class CopyActivity(ExecutionActivity): 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, + 'log_settings': {'key': 'typeProperties.logSettings', 'type': 'LogSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, @@ -9288,6 +9816,7 @@ def __init__( enable_skip_incompatible_row: Optional[object] = None, redirect_incompatible_row_settings: Optional["RedirectIncompatibleRowSettings"] = None, log_storage_settings: Optional["LogStorageSettings"] = None, + log_settings: Optional["LogSettings"] = None, preserve_rules: Optional[List[object]] = None, preserve: Optional[List[object]] = None, validate_data_consistency: Optional[object] = None, @@ -9308,12 +9837,41 @@ def __init__( self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings self.log_storage_settings = log_storage_settings + self.log_settings = log_settings self.preserve_rules = preserve_rules self.preserve = preserve self.validate_data_consistency = validate_data_consistency self.skip_error_file = skip_error_file +class CopyActivityLogSettings(msrest.serialization.Model): + """Settings for copy activity log. + + :param log_level: Gets or sets the log level, support: Info, Warning. Type: string (or + Expression with resultType string). + :type log_level: object + :param enable_reliable_logging: Specifies whether to enable reliable logging. Type: boolean (or + Expression with resultType boolean). + :type enable_reliable_logging: object + """ + + _attribute_map = { + 'log_level': {'key': 'logLevel', 'type': 'object'}, + 'enable_reliable_logging': {'key': 'enableReliableLogging', 'type': 'object'}, + } + + def __init__( + self, + *, + log_level: Optional[object] = None, + enable_reliable_logging: Optional[object] = None, + **kwargs + ): + super(CopyActivityLogSettings, self).__init__(**kwargs) + self.log_level = log_level + self.enable_reliable_logging = enable_reliable_logging + + class CopyTranslator(msrest.serialization.Model): """A copy activity translator. @@ -9353,7 +9911,7 @@ def __init__( self.type = 'CopyTranslator' # type: str -class CosmosDBLinkedService(LinkedService): +class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. All required parameters must be populated in order to send to Azure. 
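Note (example, not part of the diff): `CopyActivityLogSettings` above is the per-activity half of the new logging model and is attached to a copy activity through the new `log_settings` property, which supersedes the now-deprecated `log_storage_settings`. The enclosing `LogSettings` model is added elsewhere in this change, so the sketch below only builds the piece that is visible here:

    # Sketch only: construct the copy-activity log settings introduced in this change.
    from azext_datafactory.vendored_sdks.datafactory import models  # assumed import path

    log_cfg = models.CopyActivityLogSettings(
        log_level="Warning",          # supported levels per the docstring: Info, Warning
        enable_reliable_logging=True,
    )
    print(log_cfg.serialize())        # {'logLevel': 'Warning', 'enableReliableLogging': True}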
@@ -9421,7 +9979,7 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(CosmosDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(CosmosDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'CosmosDb' # type: str self.connection_string = connection_string self.account_endpoint = account_endpoint @@ -9430,7 +9988,7 @@ def __init__( self.encrypted_credential = encrypted_credential -class CosmosDBMongoDBApiCollectionDataset(Dataset): +class CosmosDbMongoDbApiCollectionDataset(Dataset): """The CosmosDB (MongoDB API) database dataset. All required parameters must be populated in order to send to Azure. @@ -9495,12 +10053,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(CosmosDBMongoDBApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(CosmosDbMongoDbApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'CosmosDbMongoDbApiCollection' # type: str self.collection = collection -class CosmosDBMongoDBApiLinkedService(LinkedService): +class CosmosDbMongoDbApiLinkedService(LinkedService): """Linked service for CosmosDB (MongoDB API) data source. All required parameters must be populated in order to send to Azure. @@ -9556,13 +10114,13 @@ def __init__( annotations: Optional[List[object]] = None, **kwargs ): - super(CosmosDBMongoDBApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(CosmosDbMongoDbApiLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'CosmosDbMongoDbApi' # type: str self.connection_string = connection_string self.database = database -class CosmosDBMongoDBApiSink(CopySink): +class CosmosDbMongoDbApiSink(CopySink): """A copy activity sink for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. 
@@ -9620,12 +10178,12 @@ def __init__( write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDBMongoDBApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'CosmosDbMongoDbApiSink' # type: str self.write_behavior = write_behavior -class CosmosDBMongoDBApiSource(CopySource): +class CosmosDbMongoDbApiSource(CopySource): """A copy activity source for a CosmosDB (MongoDB API) database. All required parameters must be populated in order to send to Azure. @@ -9649,7 +10207,7 @@ class CosmosDBMongoDBApiSource(CopySource): with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. @@ -9674,7 +10232,7 @@ class CosmosDBMongoDBApiSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -9688,13 +10246,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, filter: Optional[object] = None, - cursor_methods: Optional["MongoDBCursorMethodsProperties"] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CosmosDBMongoDBApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'CosmosDbMongoDbApiSource' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -9703,7 +10261,7 @@ def __init__( self.additional_columns = additional_columns -class CosmosDBSQLApiCollectionDataset(Dataset): +class CosmosDbSqlApiCollectionDataset(Dataset): """Microsoft Azure CosmosDB (SQL API) Collection 
dataset. All required parameters must be populated in order to send to Azure. @@ -9768,12 +10326,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(CosmosDBSQLApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(CosmosDbSqlApiCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'CosmosDbSqlApiCollection' # type: str self.collection_name = collection_name -class CosmosDBSQLApiSink(CopySink): +class CosmosDbSqlApiSink(CopySink): """A copy activity Azure CosmosDB (SQL API) Collection sink. All required parameters must be populated in order to send to Azure. @@ -9830,12 +10388,12 @@ def __init__( write_behavior: Optional[object] = None, **kwargs ): - super(CosmosDBSQLApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'CosmosDbSqlApiSink' # type: str self.write_behavior = write_behavior -class CosmosDBSQLApiSource(CopySource): +class CosmosDbSqlApiSource(CopySource): """A copy activity Azure CosmosDB (SQL API) Collection source. All required parameters must be populated in order to send to Azure. @@ -9901,7 +10459,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(CosmosDBSQLApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'CosmosDbSqlApiSource' # type: str self.query = query self.page_size = page_size @@ -10116,18 +10674,16 @@ class CreateDataFlowDebugSessionRequest(msrest.serialization.Model): :type core_count: int :param time_to_live: Time to live setting of the cluster in minutes. :type time_to_live: int - :param name: The resource name. - :type name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :param integration_runtime: Set to use integration runtime setting for data flow debug session. 
+ :type integration_runtime: + ~data_factory_management_client.models.IntegrationRuntimeDebugResource """ _attribute_map = { 'compute_type': {'key': 'computeType', 'type': 'str'}, 'core_count': {'key': 'coreCount', 'type': 'int'}, 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, - 'name': {'key': 'integrationRuntime.name', 'type': 'str'}, - 'properties': {'key': 'integrationRuntime.properties', 'type': 'IntegrationRuntime'}, + 'integration_runtime': {'key': 'integrationRuntime', 'type': 'IntegrationRuntimeDebugResource'}, } def __init__( @@ -10136,16 +10692,14 @@ def __init__( compute_type: Optional[str] = None, core_count: Optional[int] = None, time_to_live: Optional[int] = None, - name: Optional[str] = None, - properties: Optional["IntegrationRuntime"] = None, + integration_runtime: Optional["IntegrationRuntimeDebugResource"] = None, **kwargs ): super(CreateDataFlowDebugSessionRequest, self).__init__(**kwargs) self.compute_type = compute_type self.core_count = core_count self.time_to_live = time_to_live - self.name = name - self.properties = properties + self.integration_runtime = integration_runtime class CreateDataFlowDebugSessionResponse(msrest.serialization.Model): @@ -10279,6 +10833,9 @@ class CustomActivity(ExecutionActivity): :param retention_time_in_days: The retention time for the files submitted for custom activity. Type: double (or Expression with resultType double). :type retention_time_in_days: object + :param auto_user_specification: Elevation level and scope for the user, default is nonadmin + task. Type: string (or Expression with resultType double). + :type auto_user_specification: object """ _validation = { @@ -10302,6 +10859,7 @@ class CustomActivity(ExecutionActivity): 'reference_objects': {'key': 'typeProperties.referenceObjects', 'type': 'CustomActivityReferenceObject'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': '{object}'}, 'retention_time_in_days': {'key': 'typeProperties.retentionTimeInDays', 'type': 'object'}, + 'auto_user_specification': {'key': 'typeProperties.autoUserSpecification', 'type': 'object'}, } def __init__( @@ -10320,6 +10878,7 @@ def __init__( reference_objects: Optional["CustomActivityReferenceObject"] = None, extended_properties: Optional[Dict[str, object]] = None, retention_time_in_days: Optional[object] = None, + auto_user_specification: Optional[object] = None, **kwargs ): super(CustomActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -10330,6 +10889,7 @@ def __init__( self.reference_objects = reference_objects self.extended_properties = extended_properties self.retention_time_in_days = retention_time_in_days + self.auto_user_specification = auto_user_specification class CustomActivityReferenceObject(msrest.serialization.Model): @@ -10479,6 +11039,80 @@ def __init__( self.type_properties = type_properties +class CustomEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a custom event is received. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. 
+ :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~data_factory_management_client.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[~data_factory_management_client.models.TriggerPipelineReference] + :param subject_begins_with: The event subject must begin with the pattern provided for trigger + to fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_begins_with: str + :param subject_ends_with: The event subject must end with the pattern provided for trigger to + fire. At least one of these must be provided: subjectBeginsWith, subjectEndsWith. + :type subject_ends_with: str + :param events: Required. The list of event types that cause this trigger to fire. + :type events: list[object] + :param scope: Required. The ARM resource ID of the Azure Event Grid Topic. + :type scope: str + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[TriggerPipelineReference]'}, + 'subject_begins_with': {'key': 'typeProperties.subjectBeginsWith', 'type': 'str'}, + 'subject_ends_with': {'key': 'typeProperties.subjectEndsWith', 'type': 'str'}, + 'events': {'key': 'typeProperties.events', 'type': '[object]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__( + self, + *, + events: List[object], + scope: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List["TriggerPipelineReference"]] = None, + subject_begins_with: Optional[str] = None, + subject_ends_with: Optional[str] = None, + **kwargs + ): + super(CustomEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type = 'CustomEventsTrigger' # type: str + self.subject_begins_with = subject_begins_with + self.subject_ends_with = subject_ends_with + self.events = events + self.scope = scope + + class DatabricksNotebookActivity(ExecutionActivity): """DatabricksNotebook activity. @@ -10847,90 +11481,84 @@ def __init__( class DataFlowDebugPackage(msrest.serialization.Model): """Request body structure for starting data flow debug session. - Variables are only populated by the server, and will be ignored when sending a request. - :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] :param session_id: The ID of data flow debug session. :type session_id: str + :param data_flow: Data flow instance. + :type data_flow: ~data_factory_management_client.models.DataFlowDebugResource :param datasets: List of datasets. 
:type datasets: list[~data_factory_management_client.models.DatasetDebugResource] :param linked_services: List of linked services. :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] - :param parameters_debug_settings_parameters: Data flow parameters. - :type parameters_debug_settings_parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). - :type folder_path: object - :ivar type: Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param parameters_staging_linked_service_parameters: Arguments for LinkedService. - :type parameters_staging_linked_service_parameters: dict[str, object] - :param name: The resource name. - :type name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :param staging: Staging info for debug session. + :type staging: ~data_factory_management_client.models.DataFlowStagingInfo + :param debug_settings: Data flow debug settings. + :type debug_settings: ~data_factory_management_client.models.DataFlowDebugPackageDebugSettings """ - _validation = { - 'type': {'constant': True}, - } - _attribute_map = { 'additional_properties': {'key': '', 'type': '{object}'}, 'session_id': {'key': 'sessionId', 'type': 'str'}, + 'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'}, 'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'}, 'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'}, - 'source_settings': {'key': 'debugSettings.sourceSettings', 'type': '[DataFlowSourceSetting]'}, - 'parameters_debug_settings_parameters': {'key': 'debugSettings.parameters', 'type': '{object}'}, - 'dataset_parameters': {'key': 'debugSettings.datasetParameters', 'type': 'object'}, - 'folder_path': {'key': 'staging.folderPath', 'type': 'object'}, - 'type': {'key': 'staging.linkedService.type', 'type': 'str'}, - 'reference_name': {'key': 'staging.linkedService.referenceName', 'type': 'str'}, - 'parameters_staging_linked_service_parameters': {'key': 'staging.linkedService.parameters', 'type': '{object}'}, - 'name': {'key': 'dataFlow.name', 'type': 'str'}, - 'properties': {'key': 'dataFlow.properties', 'type': 'DataFlow'}, + 'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'}, + 'debug_settings': {'key': 'debugSettings', 'type': 'DataFlowDebugPackageDebugSettings'}, } - type = "LinkedServiceReference" - def __init__( self, *, additional_properties: Optional[Dict[str, object]] = None, session_id: Optional[str] = None, + data_flow: Optional["DataFlowDebugResource"] = None, datasets: Optional[List["DatasetDebugResource"]] = None, linked_services: Optional[List["LinkedServiceDebugResource"]] = None, - source_settings: Optional[List["DataFlowSourceSetting"]] = None, - parameters_debug_settings_parameters: Optional[Dict[str, object]] = None, - dataset_parameters: Optional[object] = None, - folder_path: Optional[object] = None, - reference_name: Optional[str] = None, - parameters_staging_linked_service_parameters: Optional[Dict[str, object]] = None, - name: Optional[str] = None, - properties: 
Optional["DataFlow"] = None, + staging: Optional["DataFlowStagingInfo"] = None, + debug_settings: Optional["DataFlowDebugPackageDebugSettings"] = None, **kwargs ): super(DataFlowDebugPackage, self).__init__(**kwargs) self.additional_properties = additional_properties self.session_id = session_id + self.data_flow = data_flow self.datasets = datasets self.linked_services = linked_services + self.staging = staging + self.debug_settings = debug_settings + + +class DataFlowDebugPackageDebugSettings(msrest.serialization.Model): + """Data flow debug settings. + + :param source_settings: Source setting for data flow debug. + :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] + :param parameters: Data flow parameters. + :type parameters: dict[str, object] + :param dataset_parameters: Parameters for dataset. + :type dataset_parameters: object + """ + + _attribute_map = { + 'source_settings': {'key': 'sourceSettings', 'type': '[DataFlowSourceSetting]'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + 'dataset_parameters': {'key': 'datasetParameters', 'type': 'object'}, + } + + def __init__( + self, + *, + source_settings: Optional[List["DataFlowSourceSetting"]] = None, + parameters: Optional[Dict[str, object]] = None, + dataset_parameters: Optional[object] = None, + **kwargs + ): + super(DataFlowDebugPackageDebugSettings, self).__init__(**kwargs) self.source_settings = source_settings - self.parameters_debug_settings_parameters = parameters_debug_settings_parameters + self.parameters = parameters self.dataset_parameters = dataset_parameters - self.folder_path = folder_path - self.reference_name = reference_name - self.parameters_staging_linked_service_parameters = parameters_staging_linked_service_parameters - self.name = name - self.properties = properties class SubResourceDebugResource(msrest.serialization.Model): @@ -11393,44 +12021,28 @@ def __init__( class DataFlowStagingInfo(msrest.serialization.Model): """Staging info for execute data flow activity. - Variables are only populated by the server, and will be ignored when sending a request. - + :param linked_service: Staging linked service reference. + :type linked_service: ~data_factory_management_client.models.LinkedServiceReference :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType string). :type folder_path: object - :ivar type: Linked service reference type. Default value: "LinkedServiceReference". - :vartype type: str - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param parameters: Arguments for LinkedService. 
- :type parameters: dict[str, object] """ - _validation = { - 'type': {'constant': True}, - } - _attribute_map = { + 'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'}, 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'type': {'key': 'linkedService.type', 'type': 'str'}, - 'reference_name': {'key': 'linkedService.referenceName', 'type': 'str'}, - 'parameters': {'key': 'linkedService.parameters', 'type': '{object}'}, } - type = "LinkedServiceReference" - def __init__( self, *, + linked_service: Optional["LinkedServiceReference"] = None, folder_path: Optional[object] = None, - reference_name: Optional[str] = None, - parameters: Optional[Dict[str, object]] = None, **kwargs ): super(DataFlowStagingInfo, self).__init__(**kwargs) + self.linked_service = linked_service self.folder_path = folder_path - self.reference_name = reference_name - self.parameters = parameters class DataLakeAnalyticsUsqlActivity(ExecutionActivity): @@ -11536,7 +12148,7 @@ class DatasetCompression(msrest.serialization.Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetZipDeflateCompression. + sub-classes are: DatasetBZip2Compression, DatasetDeflateCompression, DatasetGZipCompression, DatasetTarCompression, DatasetTarGZipCompression, DatasetZipDeflateCompression. All required parameters must be populated in order to send to Azure. @@ -11557,7 +12169,7 @@ class DatasetCompression(msrest.serialization.Model): } _subtype_map = { - 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} + 'type': {'BZip2': 'DatasetBZip2Compression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'Tar': 'DatasetTarCompression', 'TarGZip': 'DatasetTarGZipCompression', 'ZipDeflate': 'DatasetZipDeflateCompression'} } def __init__( @@ -11901,6 +12513,73 @@ def __init__( self.type = type +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(DatasetTarCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'Tar' # type: str + + +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset compression.Constant filled by server. + :type type: str + :param level: The TarGZip compression level. Possible values include: "Optimal", "Fastest". 
+ :type level: str or ~data_factory_management_client.models.DatasetCompressionLevel + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'level': {'key': 'level', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + level: Optional[Union[str, "DatasetCompressionLevel"]] = None, + **kwargs + ): + super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarGZip' # type: str + self.level = level + + class DatasetZipDeflateCompression(DatasetCompression): """The ZipDeflate compression method used on a dataset. @@ -12335,7 +13014,7 @@ class DelimitedTextDataset(Dataset): resultType string). :type encoding_name: object :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec :param compression_level: The data compression method used for DelimitedText. Possible values include: "Optimal", "Fastest". @@ -12602,6 +13281,13 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object """ _validation = { @@ -12614,6 +13300,8 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__( @@ -12622,12 +13310,16 @@ def __init__( file_extension: object, additional_properties: Optional[Dict[str, object]] = None, quote_all_text: Optional[object] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, **kwargs ): super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.type = 'DelimitedTextWriteSettings' # type: str self.quote_all_text = quote_all_text self.file_extension = file_extension + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix class DependencyReference(msrest.serialization.Model): @@ -12704,7 +13396,7 @@ def __init__( self.distcp_options = distcp_options -class DocumentDBCollectionDataset(Dataset): +class DocumentDbCollectionDataset(Dataset): """Microsoft Azure Document Database Collection dataset. All required parameters must be populated in order to send to Azure. 
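The hunks above register Tar and TarGZip as dataset compression subtypes and extend DelimitedTextWriteSettings with file-splitting controls. A short sketch of both, not part of this diff, assuming the classes carry the signatures shown here (file_extension stays required); the prefix and row limit are illustrative values:

# Illustrative only; constructor arguments taken from the models in this diff.
from azext_datafactory.vendored_sdks.datafactory.models import (
    DatasetTarGZipCompression,
    DelimitedTextWriteSettings,
)

# TarGZip compression for a dataset; 'level' accepts "Optimal" or "Fastest".
compression = DatasetTarGZipCompression(level="Fastest")

# Write settings that cap each output file at 1,000,000 rows and use a fixed
# file name prefix when copying from a non-file-based store.
write_settings = DelimitedTextWriteSettings(
    file_extension=".csv",          # required
    quote_all_text=True,
    max_rows_per_file=1000000,
    file_name_prefix="part",        # illustrative prefix
)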
@@ -12769,12 +13461,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(DocumentDBCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(DocumentDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'DocumentDbCollection' # type: str self.collection_name = collection_name -class DocumentDBCollectionSink(CopySink): +class DocumentDbCollectionSink(CopySink): """A copy activity Document Database Collection sink. All required parameters must be populated in order to send to Azure. @@ -12836,13 +13528,13 @@ def __init__( write_behavior: Optional[object] = None, **kwargs ): - super(DocumentDBCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'DocumentDbCollectionSink' # type: str self.nesting_separator = nesting_separator self.write_behavior = write_behavior -class DocumentDBCollectionSource(CopySource): +class DocumentDbCollectionSource(CopySource): """A copy activity Document Database Collection source. All required parameters must be populated in order to send to Azure. @@ -12903,7 +13595,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(DocumentDBCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'DocumentDbCollectionSource' # type: str self.query = query self.nesting_separator = nesting_separator @@ -13118,7 +13810,7 @@ def __init__( self.schema_type_properties_schema = schema_type_properties_schema -class DWCopyCommandDefaultValue(msrest.serialization.Model): +class DwCopyCommandDefaultValue(msrest.serialization.Model): """Default value. :param column_name: Column name. Type: object (or Expression with resultType string). @@ -13140,19 +13832,19 @@ def __init__( default_value: Optional[object] = None, **kwargs ): - super(DWCopyCommandDefaultValue, self).__init__(**kwargs) + super(DwCopyCommandDefaultValue, self).__init__(**kwargs) self.column_name = column_name self.default_value = default_value -class DWCopyCommandSettings(msrest.serialization.Model): +class DwCopyCommandSettings(msrest.serialization.Model): """DW Copy Command settings. :param default_values: Specifies the default values for each target column in SQL DW. 
The default values in the property overwrite the DEFAULT constraint set in the DB, and identity column cannot have a default value. Type: array of objects (or Expression with resultType array of objects). - :type default_values: list[~data_factory_management_client.models.DWCopyCommandDefaultValue] + :type default_values: list[~data_factory_management_client.models.DwCopyCommandDefaultValue] :param additional_options: Additional options directly passed to SQL DW in Copy Command. Type: key value pairs (value should be string type) (or Expression with resultType object). Example: "additionalOptions": { "MAXERRORS": "1000", "DATEFORMAT": "'ymd'" }. @@ -13160,18 +13852,18 @@ class DWCopyCommandSettings(msrest.serialization.Model): """ _attribute_map = { - 'default_values': {'key': 'defaultValues', 'type': '[DWCopyCommandDefaultValue]'}, + 'default_values': {'key': 'defaultValues', 'type': '[DwCopyCommandDefaultValue]'}, 'additional_options': {'key': 'additionalOptions', 'type': '{str}'}, } def __init__( self, *, - default_values: Optional[List["DWCopyCommandDefaultValue"]] = None, + default_values: Optional[List["DwCopyCommandDefaultValue"]] = None, additional_options: Optional[Dict[str, str]] = None, **kwargs ): - super(DWCopyCommandSettings, self).__init__(**kwargs) + super(DwCopyCommandSettings, self).__init__(**kwargs) self.default_values = default_values self.additional_options = additional_options @@ -14303,6 +14995,52 @@ def __init__( self.query = query +class EncryptionConfiguration(msrest.serialization.Model): + """Definition of CMK for the factory. + + All required parameters must be populated in order to send to Azure. + + :param key_name: Required. The name of the key in Azure Key Vault to use as Customer Managed + Key. + :type key_name: str + :param vault_base_url: Required. The url of the Azure Key Vault used for CMK. + :type vault_base_url: str + :param key_version: The version of the key used for CMK. If not provided, latest version will + be used. + :type key_version: str + :param identity: User assigned identity to use to authenticate to customer's key vault. If not + provided Managed Service Identity will be used. + :type identity: ~data_factory_management_client.models.CmkIdentityDefinition + """ + + _validation = { + 'key_name': {'required': True}, + 'vault_base_url': {'required': True}, + } + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + 'vault_base_url': {'key': 'vaultBaseUrl', 'type': 'str'}, + 'key_version': {'key': 'keyVersion', 'type': 'str'}, + 'identity': {'key': 'identity', 'type': 'CmkIdentityDefinition'}, + } + + def __init__( + self, + *, + key_name: str, + vault_base_url: str, + key_version: Optional[str] = None, + identity: Optional["CmkIdentityDefinition"] = None, + **kwargs + ): + super(EncryptionConfiguration, self).__init__(**kwargs) + self.key_name = key_name + self.vault_base_url = vault_base_url + self.key_version = key_version + self.identity = identity + + class EntityReference(msrest.serialization.Model): """The entity reference. @@ -14553,6 +15291,16 @@ class ExecuteDataFlowActivity(ExecutionActivity): :param compute: Compute properties for data flow activity. :type compute: ~data_factory_management_client.models.ExecuteDataFlowActivityTypePropertiesCompute + :param trace_level: Trace level setting used for data flow monitoring output. Supported values + are: 'coarse', 'fine', and 'none'. Type: string (or Expression with resultType string). 
+ :type trace_level: object + :param continue_on_error: Continue on error setting used for data flow execution. Enables + processing to continue if a sink fails. Type: boolean (or Expression with resultType boolean). + :type continue_on_error: object + :param run_concurrently: Concurrent run setting used for data flow execution. Allows sinks with + the same save order to be processed concurrently. Type: boolean (or Expression with resultType + boolean). + :type run_concurrently: object """ _validation = { @@ -14574,6 +15322,9 @@ class ExecuteDataFlowActivity(ExecutionActivity): 'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'}, 'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'}, 'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'}, + 'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'}, + 'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'}, + 'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'}, } def __init__( @@ -14590,6 +15341,9 @@ def __init__( staging: Optional["DataFlowStagingInfo"] = None, integration_runtime: Optional["IntegrationRuntimeReference"] = None, compute: Optional["ExecuteDataFlowActivityTypePropertiesCompute"] = None, + trace_level: Optional[object] = None, + continue_on_error: Optional[object] = None, + run_concurrently: Optional[object] = None, **kwargs ): super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) @@ -14598,29 +15352,33 @@ def __init__( self.staging = staging self.integration_runtime = integration_runtime self.compute = compute + self.trace_level = trace_level + self.continue_on_error = continue_on_error + self.run_concurrently = run_concurrently class ExecuteDataFlowActivityTypePropertiesCompute(msrest.serialization.Model): """Compute properties for data flow activity. :param compute_type: Compute type of the cluster which will execute data flow job. Possible - values include: "General", "MemoryOptimized", "ComputeOptimized". - :type compute_type: str or ~data_factory_management_client.models.DataFlowComputeType + values include: 'General', 'MemoryOptimized', 'ComputeOptimized'. Type: string (or Expression + with resultType string). + :type compute_type: object :param core_count: Core count of the cluster which will execute data flow job. Supported values - are: 8, 16, 32, 48, 80, 144 and 272. - :type core_count: int + are: 8, 16, 32, 48, 80, 144 and 272. Type: integer (or Expression with resultType integer). + :type core_count: object """ _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'compute_type': {'key': 'computeType', 'type': 'object'}, + 'core_count': {'key': 'coreCount', 'type': 'object'}, } def __init__( self, *, - compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, - core_count: Optional[int] = None, + compute_type: Optional[object] = None, + core_count: Optional[object] = None, **kwargs ): super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs) @@ -14819,45 +15577,6 @@ def __init__( self.log_location = log_location -class ExportSettings(msrest.serialization.Model): - """Export command settings. 
- - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The export setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - **kwargs - ): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'ExportSettings' # type: str - - class ExposureControlBatchRequest(msrest.serialization.Model): """A list of exposure control features. @@ -15089,6 +15808,11 @@ class Factory(Resource): :param global_parameters: List of parameters for factory. :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] + :param encryption: Properties to enable Customer Managed Key for the factory. + :type encryption: ~data_factory_management_client.models.EncryptionConfiguration + :param public_network_access: Whether or not public network access is allowed for the data + factory. Possible values include: "Enabled", "Disabled". + :type public_network_access: str or ~data_factory_management_client.models.PublicNetworkAccess """ _validation = { @@ -15115,6 +15839,8 @@ class Factory(Resource): 'version': {'key': 'properties.version', 'type': 'str'}, 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, 'global_parameters': {'key': 'properties.globalParameters', 'type': '{GlobalParameterSpecification}'}, + 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionConfiguration'}, + 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, } def __init__( @@ -15126,6 +15852,8 @@ def __init__( identity: Optional["FactoryIdentity"] = None, repo_configuration: Optional["FactoryRepoConfiguration"] = None, global_parameters: Optional[Dict[str, "GlobalParameterSpecification"]] = None, + encryption: Optional["EncryptionConfiguration"] = None, + public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, **kwargs ): super(Factory, self).__init__(location=location, tags=tags, **kwargs) @@ -15136,6 +15864,8 @@ def __init__( self.version = None self.repo_configuration = repo_configuration self.global_parameters = global_parameters + self.encryption = encryption + self.public_network_access = public_network_access class FactoryRepoConfiguration(msrest.serialization.Model): @@ -15262,13 +15992,14 @@ class FactoryIdentity(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :ivar type: Required. The identity type. Currently the only supported type is 'SystemAssigned'. - Default value: "SystemAssigned". + :ivar type: Required. The identity type. Default value: "SystemAssigned". :vartype type: str :ivar principal_id: The principal id of the identity. :vartype principal_id: str :ivar tenant_id: The client tenant id of the identity. 
:vartype tenant_id: str + :param user_assigned_identities: List of user assigned identities for the factory. + :type user_assigned_identities: dict[str, object] """ _validation = { @@ -15281,17 +16012,21 @@ class FactoryIdentity(msrest.serialization.Model): 'type': {'key': 'type', 'type': 'str'}, 'principal_id': {'key': 'principalId', 'type': 'str'}, 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{object}'}, } type = "SystemAssigned" def __init__( self, + *, + user_assigned_identities: Optional[Dict[str, object]] = None, **kwargs ): super(FactoryIdentity, self).__init__(**kwargs) self.principal_id = None self.tenant_id = None + self.user_assigned_identities = user_assigned_identities class FactoryListResponse(msrest.serialization.Model): @@ -20030,45 +20765,6 @@ def __init__( self.query = query -class ImportSettings(msrest.serialization.Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand. - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param type: Required. The import setting type.Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__( - self, - *, - additional_properties: Optional[Dict[str, object]] = None, - **kwargs - ): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = 'ImportSettings' # type: str - - class InformixLinkedService(LinkedService): """Informix linked service. @@ -20949,6 +21645,9 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): include: "Basic", "Standard", "Premium", "PremiumRS". :type catalog_pricing_tier: str or ~data_factory_management_client.models.IntegrationRuntimeSsisCatalogPricingTier + :param dual_standby_pair_name: The dual standby pair name of Azure-SSIS Integration Runtimes to + support SSISDB failover. 
+ :type dual_standby_pair_name: str """ _validation = { @@ -20961,6 +21660,7 @@ class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'SecureString'}, 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + 'dual_standby_pair_name': {'key': 'dualStandbyPairName', 'type': 'str'}, } def __init__( @@ -20971,6 +21671,7 @@ def __init__( catalog_admin_user_name: Optional[str] = None, catalog_admin_password: Optional["SecureString"] = None, catalog_pricing_tier: Optional[Union[str, "IntegrationRuntimeSsisCatalogPricingTier"]] = None, + dual_standby_pair_name: Optional[str] = None, **kwargs ): super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) @@ -20979,6 +21680,7 @@ def __init__( self.catalog_admin_user_name = catalog_admin_user_name self.catalog_admin_password = catalog_admin_password self.catalog_pricing_tier = catalog_pricing_tier + self.dual_standby_pair_name = dual_standby_pair_name class IntegrationRuntimeSsisProperties(msrest.serialization.Model): @@ -22113,8 +22815,81 @@ def __init__( self.properties = properties +class LogLocationSettings(msrest.serialization.Model): + """Log location settings. + + All required parameters must be populated in order to send to Azure. + + :param linked_service_name: Required. Log storage linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param path: The path to storage for storing detailed logs of activity execution. Type: string + (or Expression with resultType string). + :type path: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'path': {'key': 'path', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + path: Optional[object] = None, + **kwargs + ): + super(LogLocationSettings, self).__init__(**kwargs) + self.linked_service_name = linked_service_name + self.path = path + + +class LogSettings(msrest.serialization.Model): + """Log settings. + + All required parameters must be populated in order to send to Azure. + + :param enable_copy_activity_log: Specifies whether to enable copy activity log. Type: boolean + (or Expression with resultType boolean). + :type enable_copy_activity_log: object + :param copy_activity_log_settings: Specifies settings for copy activity log. + :type copy_activity_log_settings: + ~data_factory_management_client.models.CopyActivityLogSettings + :param log_location_settings: Required. Log location settings customer needs to provide when + enabling log. 
+ :type log_location_settings: ~data_factory_management_client.models.LogLocationSettings + """ + + _validation = { + 'log_location_settings': {'required': True}, + } + + _attribute_map = { + 'enable_copy_activity_log': {'key': 'enableCopyActivityLog', 'type': 'object'}, + 'copy_activity_log_settings': {'key': 'copyActivityLogSettings', 'type': 'CopyActivityLogSettings'}, + 'log_location_settings': {'key': 'logLocationSettings', 'type': 'LogLocationSettings'}, + } + + def __init__( + self, + *, + log_location_settings: "LogLocationSettings", + enable_copy_activity_log: Optional[object] = None, + copy_activity_log_settings: Optional["CopyActivityLogSettings"] = None, + **kwargs + ): + super(LogSettings, self).__init__(**kwargs) + self.enable_copy_activity_log = enable_copy_activity_log + self.copy_activity_log_settings = copy_activity_log_settings + self.log_location_settings = log_location_settings + + class LogStorageSettings(msrest.serialization.Model): - """Log storage settings. + """(Deprecated. Please use LogSettings) Log storage settings. All required parameters must be populated in order to send to Azure. @@ -22471,6 +23246,9 @@ class ManagedIntegrationRuntime(IntegrationRuntime): Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". :vartype state: str or ~data_factory_management_client.models.IntegrationRuntimeState + :param managed_virtual_network: Managed Virtual Network reference. + :type managed_virtual_network: + ~data_factory_management_client.models.ManagedVirtualNetworkReference :param compute_properties: The compute resource for managed integration runtime. :type compute_properties: ~data_factory_management_client.models.IntegrationRuntimeComputeProperties @@ -22488,6 +23266,7 @@ class ManagedIntegrationRuntime(IntegrationRuntime): 'type': {'key': 'type', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, + 'managed_virtual_network': {'key': 'managedVirtualNetwork', 'type': 'ManagedVirtualNetworkReference'}, 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, } @@ -22497,6 +23276,7 @@ def __init__( *, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, + managed_virtual_network: Optional["ManagedVirtualNetworkReference"] = None, compute_properties: Optional["IntegrationRuntimeComputeProperties"] = None, ssis_properties: Optional["IntegrationRuntimeSsisProperties"] = None, **kwargs @@ -22504,6 +23284,7 @@ def __init__( super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) self.type = 'Managed' # type: str self.state = None + self.managed_virtual_network = managed_virtual_network self.compute_properties = compute_properties self.ssis_properties = ssis_properties @@ -22817,6 +23598,8 @@ class ManagedPrivateEndpointResource(SubResource): Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar id: The resource identifier. :vartype id: str :ivar name: The resource name. @@ -22825,19 +23608,8 @@ class ManagedPrivateEndpointResource(SubResource): :vartype type: str :ivar etag: Etag identifies change in the resource. 
:vartype etag: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :ivar is_reserved: Denotes whether the managed private endpoint is reserved. - :vartype is_reserved: bool - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. - :type private_link_resource_id: str - :ivar provisioning_state: The managed private endpoint provisioning state. - :vartype provisioning_state: str + :param properties: Required. Managed private endpoint properties. + :type properties: ~data_factory_management_client.models.ManagedPrivateEndpoint """ _validation = { @@ -22845,8 +23617,7 @@ class ManagedPrivateEndpointResource(SubResource): 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, - 'is_reserved': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { @@ -22854,30 +23625,17 @@ class ManagedPrivateEndpointResource(SubResource): 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, - 'connection_state': {'key': 'properties.connectionState', 'type': 'ConnectionStateProperties'}, - 'fqdns': {'key': 'properties.fqdns', 'type': '[str]'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'is_reserved': {'key': 'properties.isReserved', 'type': 'bool'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'ManagedPrivateEndpoint'}, } def __init__( self, *, - connection_state: Optional["ConnectionStateProperties"] = None, - fqdns: Optional[List[str]] = None, - group_id: Optional[str] = None, - private_link_resource_id: Optional[str] = None, + properties: "ManagedPrivateEndpoint", **kwargs ): super(ManagedPrivateEndpointResource, self).__init__(**kwargs) - self.connection_state = connection_state - self.fqdns = fqdns - self.group_id = group_id - self.is_reserved = None - self.private_link_resource_id = private_link_resource_id - self.provisioning_state = None + self.properties = properties class ManagedVirtualNetwork(msrest.serialization.Model): @@ -22949,6 +23707,42 @@ def __init__( self.next_link = next_link +class ManagedVirtualNetworkReference(msrest.serialization.Model): + """Managed Virtual Network reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Managed Virtual Network reference type. Default value: + "ManagedVirtualNetworkReference". + :vartype type: str + :param reference_name: Required. Reference ManagedVirtualNetwork name. 
+ :type reference_name: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + type = "ManagedVirtualNetworkReference" + + def __init__( + self, + *, + reference_name: str, + **kwargs + ): + super(ManagedVirtualNetworkReference, self).__init__(**kwargs) + self.reference_name = reference_name + + class ManagedVirtualNetworkResource(SubResource): """Managed Virtual Network resource type. @@ -23047,7 +23841,7 @@ def __init__( self.script = script -class MariaDBLinkedService(LinkedService): +class MariaDbLinkedService(LinkedService): """MariaDB server linked service. All required parameters must be populated in order to send to Azure. @@ -23105,14 +23899,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(MariaDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(MariaDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MariaDB' # type: str self.connection_string = connection_string self.pwd = pwd self.encrypted_credential = encrypted_credential -class MariaDBSource(TabularSource): +class MariaDbSource(TabularSource): """A copy activity MariaDB server source. All required parameters must be populated in order to send to Azure. @@ -23169,12 +23963,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MariaDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MariaDBSource' # type: str self.query = query -class MariaDBTableDataset(Dataset): +class MariaDbTableDataset(Dataset): """MariaDB server dataset. All required parameters must be populated in order to send to Azure. @@ -23237,7 +24031,7 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(MariaDBTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(MariaDbTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MariaDBTable' # type: str self.table_name = table_name @@ -23694,14 +24488,84 @@ class MicrosoftAccessTableDataset(Dataset): :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the root level. :type folder: ~data_factory_management_client.models.DatasetFolder - :param table_name: The Microsoft Access table name. 
Type: string (or Expression with resultType - string). - :type table_name: object + :param table_name: The Microsoft Access table name. Type: string (or Expression with resultType + string). + :type table_name: object + """ + + _validation = { + 'type': {'required': True}, + 'linked_service_name': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + } + + def __init__( + self, + *, + linked_service_name: "LinkedServiceReference", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + structure: Optional[object] = None, + schema: Optional[object] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + folder: Optional["DatasetFolder"] = None, + table_name: Optional[object] = None, + **kwargs + ): + super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MicrosoftAccessTable' # type: str + self.table_name = table_name + + +class MongoDbAtlasCollectionDataset(Dataset): + """The MongoDB Atlas database dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset.Constant filled by server. + :type type: str + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: array (or Expression + with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the dataset. Type: array (or + Expression with resultType array), itemType: DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: ~data_factory_management_client.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, Dataset will appear at the + root level. + :type folder: ~data_factory_management_client.models.DatasetFolder + :param collection: Required. The collection name of the MongoDB Atlas database. Type: string + (or Expression with resultType string). 
+ :type collection: object """ _validation = { 'type': {'required': True}, 'linked_service_name': {'required': True}, + 'collection': {'required': True}, } _attribute_map = { @@ -23714,13 +24578,14 @@ class MicrosoftAccessTableDataset(Dataset): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, - 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'collection': {'key': 'typeProperties.collection', 'type': 'object'}, } def __init__( self, *, linked_service_name: "LinkedServiceReference", + collection: object, additional_properties: Optional[Dict[str, object]] = None, description: Optional[str] = None, structure: Optional[object] = None, @@ -23728,15 +24593,154 @@ def __init__( parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, folder: Optional["DatasetFolder"] = None, - table_name: Optional[object] = None, **kwargs ): - super(MicrosoftAccessTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) - self.type = 'MicrosoftAccessTable' # type: str - self.table_name = table_name + super(MongoDbAtlasCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.type = 'MongoDbAtlasCollection' # type: str + self.collection = collection + + +class MongoDbAtlasLinkedService(LinkedService): + """Linked service for MongoDB Atlas data source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of linked service.Constant filled by server. + :type type: str + :param connect_via: The integration runtime reference. + :type connect_via: ~data_factory_management_client.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, ~data_factory_management_client.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the linked service. + :type annotations: list[object] + :param connection_string: Required. The MongoDB Atlas connection string. Type: string, + SecureString or AzureKeyVaultSecretReference. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param database: Required. The name of the MongoDB Atlas database that you want to access. + Type: string (or Expression with resultType string). 
+ :type database: object + """ + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + 'database': {'required': True}, + } -class MongoDBCollectionDataset(Dataset): + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__( + self, + *, + connection_string: object, + database: object, + additional_properties: Optional[Dict[str, object]] = None, + connect_via: Optional["IntegrationRuntimeReference"] = None, + description: Optional[str] = None, + parameters: Optional[Dict[str, "ParameterSpecification"]] = None, + annotations: Optional[List[object]] = None, + **kwargs + ): + super(MongoDbAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.type = 'MongoDbAtlas' # type: str + self.connection_string = connection_string + self.database = database + + +class MongoDbAtlasSource(CopySource): + """A copy activity source for a MongoDB Atlas database. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Copy source type.Constant filled by server. + :type type: str + :param source_retry_count: Source retry count. Type: integer (or Expression with resultType + integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression with resultType + string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count for the source data + store. Type: integer (or Expression with resultType integer). + :type max_concurrent_connections: object + :param filter: Specifies selection filter using query operators. To return all documents in a + collection, omit this parameter or pass an empty document ({}). Type: string (or Expression + with resultType string). + :type filter: object + :param cursor_methods: Cursor methods for Mongodb query. + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties + :param batch_size: Specifies the number of documents to return in each batch of the response + from MongoDB Atlas instance. In most cases, modifying the batch size will not affect the user + or the application. This property's main purpose is to avoid hit the limitation of response + size. Type: integer (or Expression with resultType integer). + :type batch_size: object + :param query_timeout: Query timeout. Type: string (or Expression with resultType string), + pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to source data. Type: + array of objects (or Expression with resultType array of objects). 
+ :type additional_columns: list[~data_factory_management_client.models.AdditionalColumns] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'filter': {'key': 'filter', 'type': 'object'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, + 'batch_size': {'key': 'batchSize', 'type': 'object'}, + 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + source_retry_count: Optional[object] = None, + source_retry_wait: Optional[object] = None, + max_concurrent_connections: Optional[object] = None, + filter: Optional[object] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, + batch_size: Optional[object] = None, + query_timeout: Optional[object] = None, + additional_columns: Optional[List["AdditionalColumns"]] = None, + **kwargs + ): + super(MongoDbAtlasSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.type = 'MongoDbAtlasSource' # type: str + self.filter = filter + self.cursor_methods = cursor_methods + self.batch_size = batch_size + self.query_timeout = query_timeout + self.additional_columns = additional_columns + + +class MongoDbCollectionDataset(Dataset): """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. @@ -23801,12 +24805,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(MongoDBCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(MongoDbCollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MongoDbCollection' # type: str self.collection_name = collection_name -class MongoDBCursorMethodsProperties(msrest.serialization.Model): +class MongoDbCursorMethodsProperties(msrest.serialization.Model): """Cursor methods for Mongodb query. :param additional_properties: Unmatched properties from the message are deserialized to this @@ -23847,7 +24851,7 @@ def __init__( limit: Optional[object] = None, **kwargs ): - super(MongoDBCursorMethodsProperties, self).__init__(**kwargs) + super(MongoDbCursorMethodsProperties, self).__init__(**kwargs) self.additional_properties = additional_properties self.project = project self.sort = sort @@ -23855,7 +24859,7 @@ def __init__( self.limit = limit -class MongoDBLinkedService(LinkedService): +class MongoDbLinkedService(LinkedService): """Linked service for MongoDb data source. All required parameters must be populated in order to send to Azure. 
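A minimal usage sketch of the new MongoDB Atlas models above (illustrative only; the import path and the LinkedServiceReference keyword are assumptions based on this extension's vendored SDK layout):

# Illustrative wiring of the new MongoDB Atlas linked service, dataset and source.
# The import path below is an assumption; adjust to wherever the models actually live.
from azext_datafactory.vendored_sdks.datafactory.models import (
    LinkedServiceReference,
    MongoDbAtlasCollectionDataset,
    MongoDbAtlasLinkedService,
    MongoDbAtlasSource,
)

# Linked service: both connection_string and database are required.
atlas_ls = MongoDbAtlasLinkedService(
    connection_string="mongodb+srv://user:pass@cluster0.example.mongodb.net",
    database="sales",
)

# Dataset: a required collection plus a reference to the linked service by name.
atlas_ds = MongoDbAtlasCollectionDataset(
    linked_service_name=LinkedServiceReference(reference_name="AtlasLinkedService"),
    collection="orders",
)

# Copy source: everything is optional; filter and batch_size mirror MongoDbV2Source.
atlas_src = MongoDbAtlasSource(filter='{"status": "shipped"}', batch_size=1000)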
@@ -23879,7 +24883,7 @@ class MongoDBLinkedService(LinkedService): :param authentication_type: The authentication type to be used to connect to the MongoDB database. Possible values include: "Basic", "Anonymous". :type authentication_type: str or - ~data_factory_management_client.models.MongoDBAuthenticationType + ~data_factory_management_client.models.MongoDbAuthenticationType :param database_name: Required. The name of the MongoDB database that you want to access. Type: string (or Expression with resultType string). :type database_name: object @@ -23941,7 +24945,7 @@ def __init__( description: Optional[str] = None, parameters: Optional[Dict[str, "ParameterSpecification"]] = None, annotations: Optional[List[object]] = None, - authentication_type: Optional[Union[str, "MongoDBAuthenticationType"]] = None, + authentication_type: Optional[Union[str, "MongoDbAuthenticationType"]] = None, username: Optional[object] = None, password: Optional["SecretBase"] = None, auth_source: Optional[object] = None, @@ -23951,7 +24955,7 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(MongoDBLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(MongoDbLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MongoDb' # type: str self.server = server self.authentication_type = authentication_type @@ -23965,7 +24969,7 @@ def __init__( self.encrypted_credential = encrypted_credential -class MongoDBSource(CopySource): +class MongoDbSource(CopySource): """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. @@ -24017,13 +25021,13 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MongoDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'MongoDbSource' # type: str self.query = query self.additional_columns = additional_columns -class MongoDBV2CollectionDataset(Dataset): +class MongoDbV2CollectionDataset(Dataset): """The MongoDB database dataset. All required parameters must be populated in order to send to Azure. 
@@ -24088,12 +25092,12 @@ def __init__( folder: Optional["DatasetFolder"] = None, **kwargs ): - super(MongoDBV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(MongoDbV2CollectionDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MongoDbV2Collection' # type: str self.collection = collection -class MongoDBV2LinkedService(LinkedService): +class MongoDbV2LinkedService(LinkedService): """Linked service for MongoDB data source. All required parameters must be populated in order to send to Azure. @@ -24148,13 +25152,13 @@ def __init__( annotations: Optional[List[object]] = None, **kwargs ): - super(MongoDBV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(MongoDbV2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MongoDbV2' # type: str self.connection_string = connection_string self.database = database -class MongoDBV2Source(CopySource): +class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. All required parameters must be populated in order to send to Azure. @@ -24178,7 +25182,7 @@ class MongoDBV2Source(CopySource): with resultType string). :type filter: object :param cursor_methods: Cursor methods for Mongodb query. - :type cursor_methods: ~data_factory_management_client.models.MongoDBCursorMethodsProperties + :type cursor_methods: ~data_factory_management_client.models.MongoDbCursorMethodsProperties :param batch_size: Specifies the number of documents to return in each batch of the response from MongoDB instance. In most cases, modifying the batch size will not affect the user or the application. This property's main purpose is to avoid hit the limitation of response size. 
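The renamed MongoDbCursorMethodsProperties pairs with MongoDbV2Source (and the Atlas source above) through cursor_methods; a short sketch under the same import-path assumption:

from azext_datafactory.vendored_sdks.datafactory.models import (
    MongoDbCursorMethodsProperties,
    MongoDbV2Source,
)

# Project two fields, sort by _id and cap the result set.
cursor = MongoDbCursorMethodsProperties(
    project='{"name": 1, "total": 1}',
    sort='{"_id": 1}',
    limit=100,
)

source = MongoDbV2Source(
    filter='{"status": "shipped"}',
    cursor_methods=cursor,
    batch_size=500,            # documents returned per response batch
    query_timeout="02:00:00",  # matches the timeout pattern documented above
)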
@@ -24203,7 +25207,7 @@ class MongoDBV2Source(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'filter': {'key': 'filter', 'type': 'object'}, - 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDBCursorMethodsProperties'}, + 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, @@ -24217,13 +25221,13 @@ def __init__( source_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, filter: Optional[object] = None, - cursor_methods: Optional["MongoDBCursorMethodsProperties"] = None, + cursor_methods: Optional["MongoDbCursorMethodsProperties"] = None, batch_size: Optional[object] = None, query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs ): - super(MongoDBV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'MongoDbV2Source' # type: str self.filter = filter self.cursor_methods = cursor_methods @@ -24232,7 +25236,7 @@ def __init__( self.additional_columns = additional_columns -class MySQLLinkedService(LinkedService): +class MySqlLinkedService(LinkedService): """Linked service for MySQL data source. All required parameters must be populated in order to send to Azure. @@ -24290,14 +25294,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(MySQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(MySqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'MySql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class MySQLSource(TabularSource): +class MySqlSource(TabularSource): """A copy activity source for MySQL databases. All required parameters must be populated in order to send to Azure. 
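For the MySql* renames, a comparable sketch (SecureString is assumed from elsewhere in this SDK; import path as above):

from azext_datafactory.vendored_sdks.datafactory.models import (
    MySqlLinkedService,
    SecureString,
)

# The password accepts any SecretBase, e.g. a SecureString or an
# AzureKeyVaultSecretReference; the value here is a placeholder.
mysql_ls = MySqlLinkedService(
    connection_string="Server=mysql.example.com;Port=3306;Database=sales;UID=app",
    password=SecureString(value="<placeholder>"),
)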
@@ -24353,12 +25357,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(MySQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'MySqlSource' # type: str self.query = query -class MySQLTableDataset(Dataset): +class MySqlTableDataset(Dataset): """The MySQL table dataset. All required parameters must be populated in order to send to Azure. @@ -24421,7 +25425,7 @@ def __init__( table_name: Optional[object] = None, **kwargs ): - super(MySQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(MySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'MySqlTable' # type: str self.table_name = table_name @@ -24557,7 +25561,7 @@ class NetezzaSource(TabularSource): :type query: object :param partition_option: The partition mechanism that will be used for Netezza read in parallel. Possible values include: "None", "DataSlice", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.NetezzaPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Netezza source partitioning. :type partition_settings: ~data_factory_management_client.models.NetezzaPartitionSettings """ @@ -24575,7 +25579,7 @@ class NetezzaSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } @@ -24589,7 +25593,7 @@ def __init__( query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, - partition_option: Optional[Union[str, "NetezzaPartitionOption"]] = None, + partition_option: Optional[object] = None, partition_settings: Optional["NetezzaPartitionSettings"] = None, **kwargs ): @@ -24728,7 +25732,7 @@ class ODataLinkedService(LinkedService): :param aad_service_principal_credential_type: Specify the credential type (key or cert) is used for service principal. Possible values include: "ServicePrincipalKey", "ServicePrincipalCert". :type aad_service_principal_credential_type: str or - ~data_factory_management_client.models.ODataAADServicePrincipalCredentialType + ~data_factory_management_client.models.ODataAadServicePrincipalCredentialType :param service_principal_key: Specify the secret of your application registered in Azure Active Directory. 
Type: string (or Expression with resultType string). :type service_principal_key: ~data_factory_management_client.models.SecretBase @@ -24790,7 +25794,7 @@ def __init__( service_principal_id: Optional[object] = None, azure_cloud_type: Optional[object] = None, aad_resource_id: Optional[object] = None, - aad_service_principal_credential_type: Optional[Union[str, "ODataAADServicePrincipalCredentialType"]] = None, + aad_service_principal_credential_type: Optional[Union[str, "ODataAadServicePrincipalCredentialType"]] = None, service_principal_key: Optional["SecretBase"] = None, service_principal_embedded_cert: Optional["SecretBase"] = None, service_principal_embedded_cert_password: Optional["SecretBase"] = None, @@ -26164,7 +27168,7 @@ class OracleSource(CopySource): :type query_timeout: object :param partition_option: The partition mechanism that will be used for Oracle read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.OraclePartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Oracle source partitioning. :type partition_settings: ~data_factory_management_client.models.OraclePartitionSettings :param additional_columns: Specifies the additional columns to be added to source data. Type: @@ -26184,7 +27188,7 @@ class OracleSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'oracle_reader_query': {'key': 'oracleReaderQuery', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } @@ -26198,7 +27202,7 @@ def __init__( max_concurrent_connections: Optional[object] = None, oracle_reader_query: Optional[object] = None, query_timeout: Optional[object] = None, - partition_option: Optional[Union[str, "OraclePartitionOption"]] = None, + partition_option: Optional[object] = None, partition_settings: Optional["OraclePartitionSettings"] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, **kwargs @@ -26322,7 +27326,7 @@ class OrcDataset(Dataset): :type folder: ~data_factory_management_client.models.DatasetFolder :param location: The location of the ORC data storage. :type location: ~data_factory_management_client.models.DatasetLocation - :param orc_compression_codec: Possible values include: "none", "zlib", "snappy". + :param orc_compression_codec: Possible values include: "none", "zlib", "snappy", "lzo". :type orc_compression_codec: str or ~data_factory_management_client.models.OrcCompressionCodec """ @@ -26432,6 +27436,8 @@ class OrcSink(CopySink): :type max_concurrent_connections: object :param store_settings: ORC store settings. :type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: ORC format settings. 
+ :type format_settings: ~data_factory_management_client.models.OrcWriteSettings """ _validation = { @@ -26447,6 +27453,7 @@ class OrcSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__( @@ -26459,11 +27466,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["OrcWriteSettings"] = None, **kwargs ): super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'OrcSink' # type: str self.store_settings = store_settings + self.format_settings = format_settings class OrcSource(CopySource): @@ -26523,6 +27532,50 @@ def __init__( self.additional_columns = additional_columns +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, + **kwargs + ): + super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'OrcWriteSettings' # type: str + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + + class PackageStore(msrest.serialization.Model): """Package store for the SSIS integration runtime. @@ -26619,7 +27672,7 @@ class ParquetDataset(Dataset): :param location: The location of the parquet storage. :type location: ~data_factory_management_client.models.DatasetLocation :param compression_codec: Possible values include: "none", "gzip", "snappy", "lzo", "bzip2", - "deflate", "zipDeflate", "lz4". + "deflate", "zipDeflate", "lz4", "tar", "tarGZip". :type compression_codec: str or ~data_factory_management_client.models.CompressionCodec """ @@ -26729,6 +27782,8 @@ class ParquetSink(CopySink): :type max_concurrent_connections: object :param store_settings: Parquet store settings. 
:type store_settings: ~data_factory_management_client.models.StoreWriteSettings + :param format_settings: Parquet format settings. + :type format_settings: ~data_factory_management_client.models.ParquetWriteSettings """ _validation = { @@ -26744,6 +27799,7 @@ class ParquetSink(CopySink): 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__( @@ -26756,11 +27812,13 @@ def __init__( sink_retry_wait: Optional[object] = None, max_concurrent_connections: Optional[object] = None, store_settings: Optional["StoreWriteSettings"] = None, + format_settings: Optional["ParquetWriteSettings"] = None, **kwargs ): super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'ParquetSink' # type: str self.store_settings = store_settings + self.format_settings = format_settings class ParquetSource(CopySource): @@ -26820,6 +27878,50 @@ def __init__( self.additional_columns = additional_columns +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The write setting type.Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller than or equal to the + specified count. Type: integer (or Expression with resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + :code:``_:code:``.:code:`` when copy from non-file + based store without partitionOptions. Type: string (or Expression with resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + max_rows_per_file: Optional[object] = None, + file_name_prefix: Optional[object] = None, + **kwargs + ): + super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'ParquetWriteSettings' # type: str + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -27307,6 +28409,48 @@ def __init__( self.query = query +class PipelineElapsedTimeMetricPolicy(msrest.serialization.Model): + """Pipeline ElapsedTime Metric Policy. + + :param duration: TimeSpan value, after which an Azure Monitoring Metric is fired. 
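The new OrcWriteSettings and ParquetWriteSettings classes above plug into their sinks' format_settings; a minimal sketch under the same import-path assumption:

from azext_datafactory.vendored_sdks.datafactory.models import (
    ParquetSink,
    ParquetWriteSettings,
)

# Cap each written file at one million rows and set the file name prefix used
# when copying from a non-file-based store without partition options.
sink = ParquetSink(
    format_settings=ParquetWriteSettings(
        max_rows_per_file=1000000,
        file_name_prefix="sales_extract",
    ),
)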
+ :type duration: object + """ + + _attribute_map = { + 'duration': {'key': 'duration', 'type': 'object'}, + } + + def __init__( + self, + *, + duration: Optional[object] = None, + **kwargs + ): + super(PipelineElapsedTimeMetricPolicy, self).__init__(**kwargs) + self.duration = duration + + +class PipelineFolder(msrest.serialization.Model): + """The folder that this Pipeline is in. If not specified, Pipeline will appear at the root level. + + :param name: The name of the folder that this Pipeline is in. + :type name: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + **kwargs + ): + super(PipelineFolder, self).__init__(**kwargs) + self.name = name + + class PipelineListResponse(msrest.serialization.Model): """A list of pipeline resources. @@ -27339,6 +28483,28 @@ def __init__( self.next_link = next_link +class PipelinePolicy(msrest.serialization.Model): + """Pipeline Policy. + + :param elapsed_time_metric: Pipeline ElapsedTime Metric Policy. + :type elapsed_time_metric: + ~data_factory_management_client.models.PipelineElapsedTimeMetricPolicy + """ + + _attribute_map = { + 'elapsed_time_metric': {'key': 'elapsedTimeMetric', 'type': 'PipelineElapsedTimeMetricPolicy'}, + } + + def __init__( + self, + *, + elapsed_time_metric: Optional["PipelineElapsedTimeMetricPolicy"] = None, + **kwargs + ): + super(PipelinePolicy, self).__init__(**kwargs) + self.elapsed_time_metric = elapsed_time_metric + + class PipelineReference(msrest.serialization.Model): """Pipeline reference type. @@ -27409,8 +28575,11 @@ class PipelineResource(SubResource): :type annotations: list[object] :param run_dimensions: Dimensions emitted by Pipeline. :type run_dimensions: dict[str, object] - :param name_folder_name: The name of the folder that this Pipeline is in. - :type name_folder_name: str + :param folder: The folder that this Pipeline is in. If not specified, Pipeline will appear at + the root level. + :type folder: ~data_factory_management_client.models.PipelineFolder + :param policy: Pipeline Policy. + :type policy: ~data_factory_management_client.models.PipelinePolicy """ _validation = { @@ -27434,7 +28603,8 @@ class PipelineResource(SubResource): 'concurrency': {'key': 'properties.concurrency', 'type': 'int'}, 'annotations': {'key': 'properties.annotations', 'type': '[object]'}, 'run_dimensions': {'key': 'properties.runDimensions', 'type': '{object}'}, - 'name_folder_name': {'key': 'folder.name', 'type': 'str'}, + 'folder': {'key': 'properties.folder', 'type': 'PipelineFolder'}, + 'policy': {'key': 'properties.policy', 'type': 'PipelinePolicy'}, } def __init__( @@ -27448,7 +28618,8 @@ def __init__( concurrency: Optional[int] = None, annotations: Optional[List[object]] = None, run_dimensions: Optional[Dict[str, object]] = None, - name_folder_name: Optional[str] = None, + folder: Optional["PipelineFolder"] = None, + policy: Optional["PipelinePolicy"] = None, **kwargs ): super(PipelineResource, self).__init__(**kwargs) @@ -27460,7 +28631,8 @@ def __init__( self.concurrency = concurrency self.annotations = annotations self.run_dimensions = run_dimensions - self.name_folder_name = name_folder_name + self.folder = folder + self.policy = policy class PipelineRun(msrest.serialization.Model): @@ -27671,7 +28843,7 @@ def __init__( self.use_type_default = use_type_default -class PostgreSQLLinkedService(LinkedService): +class PostgreSqlLinkedService(LinkedService): """Linked service for PostgreSQL data source. 
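The folder/policy additions to PipelineResource replace the flattened name_folder_name property; a hedged sketch of how they might be populated (import path assumed as above):

from azext_datafactory.vendored_sdks.datafactory.models import (
    PipelineElapsedTimeMetricPolicy,
    PipelineFolder,
    PipelinePolicy,
    PipelineResource,
)

# Place the pipeline in a folder and raise the elapsed-time metric when a run
# exceeds 30 minutes (the duration value is passed through as an opaque object).
pipeline = PipelineResource(
    folder=PipelineFolder(name="ingest/mongodb"),
    policy=PipelinePolicy(
        elapsed_time_metric=PipelineElapsedTimeMetricPolicy(duration="0.00:30:00"),
    ),
)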
All required parameters must be populated in order to send to Azure. @@ -27729,14 +28901,14 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(PostgreSQLLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(PostgreSqlLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'PostgreSql' # type: str self.connection_string = connection_string self.password = password self.encrypted_credential = encrypted_credential -class PostgreSQLSource(TabularSource): +class PostgreSqlSource(TabularSource): """A copy activity source for PostgreSQL databases. All required parameters must be populated in order to send to Azure. @@ -27792,12 +28964,12 @@ def __init__( query: Optional[object] = None, **kwargs ): - super(PostgreSQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'PostgreSqlSource' # type: str self.query = query -class PostgreSQLTableDataset(Dataset): +class PostgreSqlTableDataset(Dataset): """The PostgreSQL table dataset. All required parameters must be populated in order to send to Azure. @@ -27870,7 +29042,7 @@ def __init__( schema_type_properties_schema: Optional[object] = None, **kwargs ): - super(PostgreSQLTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(PostgreSqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'PostgreSqlTable' # type: str self.table_name = table_name self.table = table @@ -29234,13 +30406,9 @@ class RestSink(CopySink): :type http_request_timeout: object :param request_interval: The time to await before sending next request, in milliseconds. :type request_interval: object - :param compression_type: Compression Type to Send data in compressed format with Optimal - Compression Level, Default is None. And The Only Supported option is Gzip. - :type compression_type: object - :param wrap_request_json_in_an_object: Wraps Request Array Json into an Object before calling - the rest endpoint , Default is false. ex: if true request content sample format is { rows:[]} - else the format is []. - :type wrap_request_json_in_an_object: object + :param http_compression_type: Http Compression Type to Send data in compressed format with + Optimal Compression Level, Default is None. And The Only Supported option is Gzip. 
+ :type http_compression_type: object """ _validation = { @@ -29259,8 +30427,7 @@ class RestSink(CopySink): 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'compression_type': {'key': 'compressionType', 'type': 'object'}, - 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, } def __init__( @@ -29276,8 +30443,7 @@ def __init__( additional_headers: Optional[object] = None, http_request_timeout: Optional[object] = None, request_interval: Optional[object] = None, - compression_type: Optional[object] = None, - wrap_request_json_in_an_object: Optional[object] = None, + http_compression_type: Optional[object] = None, **kwargs ): super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) @@ -29286,8 +30452,7 @@ def __init__( self.additional_headers = additional_headers self.http_request_timeout = http_request_timeout self.request_interval = request_interval - self.compression_type = compression_type - self.wrap_request_json_in_an_object = wrap_request_json_in_an_object + self.http_compression_type = http_compression_type class RestSource(CopySource): @@ -31209,7 +32374,7 @@ class SapHanaSource(TabularSource): :type packet_size: object :param partition_option: The partition mechanism that will be used for SAP HANA read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "SapHanaDynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SapHanaPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP HANA source partitioning. :type partition_settings: ~data_factory_management_client.models.SapHanaPartitionSettings @@ -31229,7 +32394,7 @@ class SapHanaSource(TabularSource): 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, } @@ -31244,7 +32409,7 @@ def __init__( additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, packet_size: Optional[object] = None, - partition_option: Optional[Union[str, "SapHanaPartitionOption"]] = None, + partition_option: Optional[object] = None, partition_settings: Optional["SapHanaPartitionSettings"] = None, **kwargs ): @@ -31918,7 +33083,7 @@ class SapTableSource(TabularSource): :param partition_option: The partition mechanism that will be used for SAP table read in parallel. Possible values include: "None", "PartitionOnInt", "PartitionOnCalendarYear", "PartitionOnCalendarMonth", "PartitionOnCalendarDate", "PartitionOnTime". - :type partition_option: str or ~data_factory_management_client.models.SapTablePartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for SAP table source partitioning. 
:type partition_settings: ~data_factory_management_client.models.SapTablePartitionSettings @@ -31943,7 +33108,7 @@ class SapTableSource(TabularSource): 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'custom_rfc_read_table_function_module': {'key': 'customRfcReadTableFunctionModule', 'type': 'object'}, 'sap_data_column_delimiter': {'key': 'sapDataColumnDelimiter', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } @@ -31963,7 +33128,7 @@ def __init__( batch_size: Optional[object] = None, custom_rfc_read_table_function_module: Optional[object] = None, sap_data_column_delimiter: Optional[object] = None, - partition_option: Optional[Union[str, "SapTablePartitionOption"]] = None, + partition_option: Optional[object] = None, partition_settings: Optional["SapTablePartitionSettings"] = None, **kwargs ): @@ -34197,7 +35362,7 @@ def __init__( self.query = query -class SQLDWSink(CopySink): +class SqlDwSink(CopySink): """A copy activity SQL Data Warehouse sink. All required parameters must be populated in order to send to Azure. @@ -34235,7 +35400,7 @@ class SQLDWSink(CopySink): :type allow_copy_command: object :param copy_command_settings: Specifies Copy Command related settings when allowCopyCommand is true. - :type copy_command_settings: ~data_factory_management_client.models.DWCopyCommandSettings + :type copy_command_settings: ~data_factory_management_client.models.DwCopyCommandSettings :param table_option: The option to handle sink table, such as autoCreate. For now only 'autoCreate' value is supported. Type: string (or Expression with resultType string). :type table_option: object @@ -34257,7 +35422,7 @@ class SQLDWSink(CopySink): 'allow_poly_base': {'key': 'allowPolyBase', 'type': 'object'}, 'poly_base_settings': {'key': 'polyBaseSettings', 'type': 'PolybaseSettings'}, 'allow_copy_command': {'key': 'allowCopyCommand', 'type': 'object'}, - 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DWCopyCommandSettings'}, + 'copy_command_settings': {'key': 'copyCommandSettings', 'type': 'DwCopyCommandSettings'}, 'table_option': {'key': 'tableOption', 'type': 'object'}, } @@ -34274,11 +35439,11 @@ def __init__( allow_poly_base: Optional[object] = None, poly_base_settings: Optional["PolybaseSettings"] = None, allow_copy_command: Optional[object] = None, - copy_command_settings: Optional["DWCopyCommandSettings"] = None, + copy_command_settings: Optional["DwCopyCommandSettings"] = None, table_option: Optional[object] = None, **kwargs ): - super(SQLDWSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlDwSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'SqlDWSink' # type: str self.pre_copy_script = pre_copy_script self.allow_poly_base = allow_poly_base @@ -34288,7 +35453,7 @@ def __init__( self.table_option = table_option -class SQLDWSource(TabularSource): +class SqlDwSource(TabularSource): """A copy activity SQL Data Warehouse source. 
All required parameters must be populated in order to send to Azure. @@ -34326,9 +35491,9 @@ class SQLDWSource(TabularSource): :type stored_procedure_parameters: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -34346,8 +35511,8 @@ class SQLDWSource(TabularSource): 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -34362,11 +35527,11 @@ def __init__( sql_reader_query: Optional[object] = None, sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[object] = None, - partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, - partition_settings: Optional["SQLPartitionSettings"] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SQLDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlDwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlDWSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -34375,7 +35540,7 @@ def __init__( self.partition_settings = partition_settings -class SQLMiSink(CopySink): +class SqlMiSink(CopySink): """A copy activity Azure SQL Managed Instance sink. All required parameters must be populated in order to send to Azure. 
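With partition_option loosened from an enum to a plain object, SqlDwSource (and the other Sql* sources below) can take either a literal value or an expression; a sketch using the renamed SqlPartitionSettings defined later in this file (import path assumed as above):

from azext_datafactory.vendored_sdks.datafactory.models import (
    SqlDwSource,
    SqlPartitionSettings,
)

# A dynamic-range read partitioned on an integer/datetime column.
source = SqlDwSource(
    sql_reader_query="SELECT * FROM dbo.FactSales",
    partition_option="DynamicRange",
    partition_settings=SqlPartitionSettings(partition_column_name="SaleDateKey"),
)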
@@ -34457,7 +35622,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SQLMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlMiSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'SqlMISink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -34467,7 +35632,7 @@ def __init__( self.table_option = table_option -class SQLMiSource(TabularSource): +class SqlMiSource(TabularSource): """A copy activity Azure SQL Managed Instance source. All required parameters must be populated in order to send to Azure. @@ -34506,9 +35671,9 @@ class SQLMiSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. - :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -34527,8 +35692,8 @@ class SQLMiSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -34544,11 +35709,11 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, - partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, - partition_settings: Optional["SQLPartitionSettings"] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SQLMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlMiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlMISource' # type: str 
self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -34558,7 +35723,7 @@ def __init__( self.partition_settings = partition_settings -class SQLPartitionSettings(msrest.serialization.Model): +class SqlPartitionSettings(msrest.serialization.Model): """The settings that will be leveraged for Sql source partitioning. :param partition_column_name: The name of the column in integer or datetime type that will be @@ -34591,13 +35756,13 @@ def __init__( partition_lower_bound: Optional[object] = None, **kwargs ): - super(SQLPartitionSettings, self).__init__(**kwargs) + super(SqlPartitionSettings, self).__init__(**kwargs) self.partition_column_name = partition_column_name self.partition_upper_bound = partition_upper_bound self.partition_lower_bound = partition_lower_bound -class SQLServerLinkedService(LinkedService): +class SqlServerLinkedService(LinkedService): """SQL Server linked service. All required parameters must be populated in order to send to Azure. @@ -34661,7 +35826,7 @@ def __init__( encrypted_credential: Optional[object] = None, **kwargs ): - super(SQLServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.type = 'SqlServer' # type: str self.connection_string = connection_string self.user_name = user_name @@ -34669,7 +35834,7 @@ def __init__( self.encrypted_credential = encrypted_credential -class SQLServerSink(CopySink): +class SqlServerSink(CopySink): """A copy activity SQL server sink. All required parameters must be populated in order to send to Azure. @@ -34751,7 +35916,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SQLServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlServerSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'SqlServerSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -34761,7 +35926,7 @@ def __init__( self.table_option = table_option -class SQLServerSource(TabularSource): +class SqlServerSource(TabularSource): """A copy activity SQL server source. All required parameters must be populated in order to send to Azure. @@ -34800,9 +35965,9 @@ class SQLServerSource(TabularSource): :type produce_additional_types: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -34821,8 +35986,8 @@ class SQLServerSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -34838,11 +36003,11 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, produce_additional_types: Optional[object] = None, - partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, - partition_settings: Optional["SQLPartitionSettings"] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SQLServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlServerSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -34852,7 +36017,7 @@ def __init__( self.partition_settings = partition_settings -class SQLServerStoredProcedureActivity(ExecutionActivity): +class SqlServerStoredProcedureActivity(ExecutionActivity): """SQL stored procedure activity type. All required parameters must be populated in order to send to Azure. @@ -34916,13 +36081,13 @@ def __init__( stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, **kwargs ): - super(SQLServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) + super(SqlServerStoredProcedureActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.type = 'SqlServerStoredProcedure' # type: str self.stored_procedure_name = stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters -class SQLServerTableDataset(Dataset): +class SqlServerTableDataset(Dataset): """The on-premises SQL Server dataset. All required parameters must be populated in order to send to Azure. 
@@ -34996,14 +36161,14 @@ def __init__( table: Optional[object] = None, **kwargs ): - super(SQLServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + super(SqlServerTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.type = 'SqlServerTable' # type: str self.table_name = table_name self.schema_type_properties_schema = schema_type_properties_schema self.table = table -class SQLSink(CopySink): +class SqlSink(CopySink): """A copy activity SQL sink. All required parameters must be populated in order to send to Azure. @@ -35085,7 +36250,7 @@ def __init__( table_option: Optional[object] = None, **kwargs ): - super(SQLSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + super(SqlSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.type = 'SqlSink' # type: str self.sql_writer_stored_procedure_name = sql_writer_stored_procedure_name self.sql_writer_table_type = sql_writer_table_type @@ -35095,7 +36260,7 @@ def __init__( self.table_option = table_option -class SQLSource(TabularSource): +class SqlSource(TabularSource): """A copy activity SQL source. All required parameters must be populated in order to send to Azure. @@ -35136,9 +36301,9 @@ class SQLSource(TabularSource): :type isolation_level: object :param partition_option: The partition mechanism that will be used for Sql read in parallel. Possible values include: "None", "PhysicalPartitionsOfTable", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.SQLPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for Sql source partitioning. 
- :type partition_settings: ~data_factory_management_client.models.SQLPartitionSettings + :type partition_settings: ~data_factory_management_client.models.SqlPartitionSettings """ _validation = { @@ -35157,8 +36322,8 @@ class SQLSource(TabularSource): 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, - 'partition_settings': {'key': 'partitionSettings', 'type': 'SQLPartitionSettings'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, + 'partition_settings': {'key': 'partitionSettings', 'type': 'SqlPartitionSettings'}, } def __init__( @@ -35174,11 +36339,11 @@ def __init__( sql_reader_stored_procedure_name: Optional[object] = None, stored_procedure_parameters: Optional[Dict[str, "StoredProcedureParameter"]] = None, isolation_level: Optional[object] = None, - partition_option: Optional[Union[str, "SQLPartitionOption"]] = None, - partition_settings: Optional["SQLPartitionSettings"] = None, + partition_option: Optional[object] = None, + partition_settings: Optional["SqlPartitionSettings"] = None, **kwargs ): - super(SQLSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.type = 'SqlSource' # type: str self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name @@ -36665,6 +37830,80 @@ def __init__( self.type_conversion_settings = type_conversion_settings +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). 
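For orientation, a minimal sketch of constructing the renamed SqlSource with the SqlPartitionSettings shown above; the vendored import path is an assumption, and partition_option is now passed as a plain value rather than the former SQLPartitionOption enum.

# Assumed import path for the regenerated, vendored models.
from azext_datafactory.vendored_sdks.datafactory import models

source = models.SqlSource(
    sql_reader_query="SELECT * FROM dbo.Orders",
    partition_option="DynamicRange",  # typed as object now, so a plain string is passed
    partition_settings=models.SqlPartitionSettings(
        partition_column_name="OrderId",
        partition_lower_bound="1",
        partition_upper_bound="100000",
    ),
)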
+ :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_compression_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarGZipReadSettings' # type: str + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. The Compression setting type.Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression file name as folder + path. Type: boolean (or Expression with resultType boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + preserve_compression_file_name_as_folder: Optional[object] = None, + **kwargs + ): + super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'TarReadSettings' # type: str + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -36813,7 +38052,7 @@ class TeradataSource(TabularSource): :type query: object :param partition_option: The partition mechanism that will be used for teradata read in parallel. Possible values include: "None", "Hash", "DynamicRange". - :type partition_option: str or ~data_factory_management_client.models.TeradataPartitionOption + :type partition_option: object :param partition_settings: The settings that will be leveraged for teradata source partitioning. 
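A short illustrative sketch of the new Tar/TarGZip compression read settings defined above, using the same assumed models import; both classes only add the preserve_compression_file_name_as_folder flag on top of CompressionReadSettings.

from azext_datafactory.vendored_sdks.datafactory import models  # assumed path

tar_settings = models.TarReadSettings(preserve_compression_file_name_as_folder=True)
tar_gzip_settings = models.TarGZipReadSettings(preserve_compression_file_name_as_folder=False)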
:type partition_settings: ~data_factory_management_client.models.TeradataPartitionSettings @@ -36832,7 +38071,7 @@ class TeradataSource(TabularSource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, - 'partition_option': {'key': 'partitionOption', 'type': 'str'}, + 'partition_option': {'key': 'partitionOption', 'type': 'object'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, } @@ -36846,7 +38085,7 @@ def __init__( query_timeout: Optional[object] = None, additional_columns: Optional[List["AdditionalColumns"]] = None, query: Optional[object] = None, - partition_option: Optional[Union[str, "TeradataPartitionOption"]] = None, + partition_option: Optional[object] = None, partition_settings: Optional["TeradataPartitionSettings"] = None, **kwargs ): @@ -37427,7 +38666,7 @@ class TumblingWindowTrigger(Trigger): trigger window that is ready. :type pipeline: ~data_factory_management_client.models.TriggerPipelineReference :param frequency: Required. The frequency of the time windows. Possible values include: - "Minute", "Hour". + "Minute", "Hour", "Month". :type frequency: str or ~data_factory_management_client.models.TumblingWindowFrequency :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 Minutes. @@ -38293,17 +39532,24 @@ class WebActivityAuthentication(msrest.serialization.Model): All required parameters must be populated in order to send to Azure. - :param type: Required. Web activity authentication (Basic/ClientCertificate/MSI). + :param type: Required. Web activity authentication + (Basic/ClientCertificate/MSI/ServicePrincipal). :type type: str - :param pfx: Base64-encoded contents of a PFX file. + :param pfx: Base64-encoded contents of a PFX file or Certificate when used for + ServicePrincipal. :type pfx: ~data_factory_management_client.models.SecretBase - :param username: Web activity authentication user name for basic authentication. + :param username: Web activity authentication user name for basic authentication or ClientID + when used for ServicePrincipal. :type username: str - :param password: Password for the PFX file or basic authentication. + :param password: Password for the PFX file or basic authentication / Secret when used for + ServicePrincipal. :type password: ~data_factory_management_client.models.SecretBase :param resource: Resource for which Azure Auth token will be requested when using MSI Authentication. :type resource: str + :param user_tenant: TenantId for which Azure Auth token will be requested when using + ServicePrincipal Authentication. Type: string (or Expression with resultType string). 
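A hedged sketch of a TumblingWindowTrigger using the newly allowed "Month" frequency; the pipeline reference wiring, the remaining required fields, and the import path are assumptions based on the generated models rather than something shown in this diff.

import datetime

from azext_datafactory.vendored_sdks.datafactory import models  # assumed path

trigger = models.TumblingWindowTrigger(
    pipeline=models.TriggerPipelineReference(
        pipeline_reference=models.PipelineReference(
            reference_name="examplePipeline",  # hypothetical pipeline name
            type="PipelineReference",
        ),
    ),
    frequency="Month",  # now accepted alongside "Minute" and "Hour"
    interval=1,
    start_time=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
    max_concurrency=1,
)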
+ :type user_tenant: object """ _validation = { @@ -38316,6 +39562,7 @@ class WebActivityAuthentication(msrest.serialization.Model): 'username': {'key': 'username', 'type': 'str'}, 'password': {'key': 'password', 'type': 'SecretBase'}, 'resource': {'key': 'resource', 'type': 'str'}, + 'user_tenant': {'key': 'userTenant', 'type': 'object'}, } def __init__( @@ -38326,6 +39573,7 @@ def __init__( username: Optional[str] = None, password: Optional["SecretBase"] = None, resource: Optional[str] = None, + user_tenant: Optional[object] = None, **kwargs ): super(WebActivityAuthentication, self).__init__(**kwargs) @@ -38334,6 +39582,7 @@ def __init__( self.username = username self.password = password self.resource = resource + self.user_tenant = user_tenant class WebLinkedServiceTypeProperties(msrest.serialization.Model): diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py index 3f6a32ff284..95d26857909 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py @@ -6,40 +6,40 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._operation_operations import OperationOperations -from ._factory_operations import FactoryOperations +from ._operations import Operations +from ._factories_operations import FactoriesOperations from ._exposure_control_operations import ExposureControlOperations -from ._integration_runtime_operations import IntegrationRuntimeOperations +from ._integration_runtimes_operations import IntegrationRuntimesOperations from ._integration_runtime_object_metadata_operations import IntegrationRuntimeObjectMetadataOperations -from ._integration_runtime_node_operations import IntegrationRuntimeNodeOperations -from ._linked_service_operations import LinkedServiceOperations -from ._dataset_operations import DatasetOperations -from ._pipeline_operations import PipelineOperations -from ._pipeline_run_operations import PipelineRunOperations -from ._activity_run_operations import ActivityRunOperations -from ._trigger_operations import TriggerOperations -from ._trigger_run_operations import TriggerRunOperations -from ._data_flow_operations import DataFlowOperations +from ._integration_runtime_nodes_operations import IntegrationRuntimeNodesOperations +from ._linked_services_operations import LinkedServicesOperations +from ._datasets_operations import DatasetsOperations +from ._pipelines_operations import PipelinesOperations +from ._pipeline_runs_operations import PipelineRunsOperations +from ._activity_runs_operations import ActivityRunsOperations +from ._triggers_operations import TriggersOperations +from ._trigger_runs_operations import TriggerRunsOperations +from ._data_flows_operations import DataFlowsOperations from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations -from ._managed_virtual_network_operations import ManagedVirtualNetworkOperations -from ._managed_private_endpoint_operations import ManagedPrivateEndpointOperations +from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations +from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations __all__ = [ - 'OperationOperations', - 'FactoryOperations', + 'Operations', + 'FactoriesOperations', 
'ExposureControlOperations', - 'IntegrationRuntimeOperations', + 'IntegrationRuntimesOperations', 'IntegrationRuntimeObjectMetadataOperations', - 'IntegrationRuntimeNodeOperations', - 'LinkedServiceOperations', - 'DatasetOperations', - 'PipelineOperations', - 'PipelineRunOperations', - 'ActivityRunOperations', - 'TriggerOperations', - 'TriggerRunOperations', - 'DataFlowOperations', + 'IntegrationRuntimeNodesOperations', + 'LinkedServicesOperations', + 'DatasetsOperations', + 'PipelinesOperations', + 'PipelineRunsOperations', + 'ActivityRunsOperations', + 'TriggersOperations', + 'TriggerRunsOperations', + 'DataFlowsOperations', 'DataFlowDebugSessionOperations', - 'ManagedVirtualNetworkOperations', - 'ManagedPrivateEndpointOperations', + 'ManagedVirtualNetworksOperations', + 'ManagedPrivateEndpointsOperations', ] diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py similarity index 74% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py index 192e09232ad..f51ff306dc7 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_run_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_activity_runs_operations.py @@ -5,11 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import datetime from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -18,13 +17,13 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class ActivityRunOperations(object): - """ActivityRunOperations operations. +class ActivityRunsOperations(object): + """ActivityRunsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -50,11 +49,7 @@ def query_by_pipeline_run( resource_group_name, # type: str factory_name, # type: str run_id, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] + filter_parameters, # type: "models.RunFilterParameters" **kwargs # type: Any ): # type: (...) 
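An illustrative sketch of the ServicePrincipal variant of WebActivityAuthentication described above; SecureString as the SecretBase implementation, the placeholder values, and the import path are assumptions.

from azext_datafactory.vendored_sdks.datafactory import models  # assumed path

auth = models.WebActivityAuthentication(
    type="ServicePrincipal",
    username="<client-id>",                                 # client ID when used for ServicePrincipal
    password=models.SecureString(value="<client-secret>"),  # secret when used for ServicePrincipal
    resource="https://management.azure.com/",
    user_tenant="<tenant-id>",                              # new field carrying the tenant ID
)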
-> "models.ActivityRunsQueryResponse" @@ -66,31 +61,21 @@ def query_by_pipeline_run( :type factory_name: str :param run_id: The pipeline run identifier. :type run_id: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] + :param filter_parameters: Parameters to filter the activity runs. + :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: ActivityRunsQueryResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ActivityRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ActivityRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_pipeline_run.metadata['url'] # type: ignore @@ -109,13 +94,12 @@ def query_by_pipeline_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py index 446c117302f..976a9653c6e 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_debug_session_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, 
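A sketch of the reshaped query_by_pipeline_run call: the flattened last_updated_*/filters keywords are replaced by a single RunFilterParameters body, and the operation group is reached through the pluralized activity_runs attribute. The client construction is assumed to have happened elsewhere.

import datetime

from azext_datafactory.vendored_sdks.datafactory import models  # assumed path

# `client` is assumed to be an existing DataFactoryManagementClient instance.
filter_parameters = models.RunFilterParameters(
    last_updated_after=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
    last_updated_before=datetime.datetime(2024, 1, 2, tzinfo=datetime.timezone.utc),
)
result = client.activity_runs.query_by_pipeline_run(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    run_id="<run-id>",
    filter_parameters=filter_parameters,
)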
ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -20,7 +20,7 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar, Union + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -51,21 +51,18 @@ def _create_initial( self, resource_group_name, # type: str factory_name, # type: str - compute_type=None, # type: Optional[str] - core_count=None, # type: Optional[int] - time_to_live=None, # type: Optional[int] - name=None, # type: Optional[str] - properties=None, # type: Optional["models.IntegrationRuntime"] + request, # type: "models.CreateDataFlowDebugSessionRequest" **kwargs # type: Any ): # type: (...) -> Optional["models.CreateDataFlowDebugSessionResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.CreateDataFlowDebugSessionResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.CreateDataFlowDebugSessionRequest(compute_type=compute_type, core_count=core_count, time_to_live=time_to_live, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._create_initial.metadata['url'] # type: ignore @@ -83,13 +80,12 @@ def _create_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'CreateDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -115,11 +111,7 @@ def begin_create( self, resource_group_name, # type: str factory_name, # type: str - compute_type=None, # type: Optional[str] - core_count=None, # type: Optional[int] - time_to_live=None, # type: Optional[int] - name=None, # type: Optional[str] - properties=None, # type: Optional["models.IntegrationRuntime"] + request, # type: "models.CreateDataFlowDebugSessionRequest" **kwargs # type: Any ): # type: (...) -> LROPoller["models.CreateDataFlowDebugSessionResponse"] @@ -129,18 +121,8 @@ def begin_create( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param compute_type: Compute type of the cluster. The value will be overwritten by the same - setting in integration runtime if provided. - :type compute_type: str - :param core_count: Core count of the cluster. The value will be overwritten by the same setting - in integration runtime if provided. - :type core_count: int - :param time_to_live: Time to live setting of the cluster in minutes. - :type time_to_live: int - :param name: The resource name. 
- :type name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :param request: Data flow debug session definition. + :type request: ~data_factory_management_client.models.CreateDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a @@ -162,11 +144,7 @@ def begin_create( raw_result = self._create_initial( resource_group_name=resource_group_name, factory_name=factory_name, - compute_type=compute_type, - core_count=core_count, - time_to_live=time_to_live, - name=name, - properties=properties, + request=request, cls=lambda x,y,z: x, **kwargs ) @@ -181,7 +159,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -214,14 +198,17 @@ def query_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.QueryDataFlowDebugSessionsResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -271,16 +258,7 @@ def add_data_flow( self, resource_group_name, # type: str factory_name, # type: str - session_id=None, # type: Optional[str] - datasets=None, # type: Optional[List["models.DatasetDebugResource"]] - linked_services=None, # type: Optional[List["models.LinkedServiceDebugResource"]] - source_settings=None, # type: Optional[List["models.DataFlowSourceSetting"]] - parameters=None, # type: Optional[Dict[str, object]] - dataset_parameters=None, # type: Optional[object] - folder_path=None, # type: Optional[object] - reference_name=None, # type: Optional[str] - name=None, # type: Optional[str] - properties=None, # type: Optional["models.DataFlow"] + request, # type: "models.DataFlowDebugPackage" **kwargs # type: Any ): # type: (...) -> "models.AddDataFlowToDebugSessionResponse" @@ -290,39 +268,21 @@ def add_data_flow( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param datasets: List of datasets. 
- :type datasets: list[~data_factory_management_client.models.DatasetDebugResource] - :param linked_services: List of linked services. - :type linked_services: list[~data_factory_management_client.models.LinkedServiceDebugResource] - :param source_settings: Source setting for data flow debug. - :type source_settings: list[~data_factory_management_client.models.DataFlowSourceSetting] - :param parameters: Data flow parameters. - :type parameters: dict[str, object] - :param dataset_parameters: Parameters for dataset. - :type dataset_parameters: object - :param folder_path: Folder path for staging blob. Type: string (or Expression with resultType - string). - :type folder_path: object - :param reference_name: Reference LinkedService name. - :type reference_name: str - :param name: The resource name. - :type name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :param request: Data flow debug session definition with debug content. + :type request: ~data_factory_management_client.models.DataFlowDebugPackage :keyword callable cls: A custom type or function that will be passed the direct response :return: AddDataFlowToDebugSessionResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.AddDataFlowToDebugSessionResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AddDataFlowToDebugSessionResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugPackage(session_id=session_id, datasets=datasets, linked_services=linked_services, source_settings=source_settings, parameters_debug_settings_parameters=parameters, dataset_parameters=dataset_parameters, folder_path=folder_path, reference_name=reference_name, name=name, properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.add_data_flow.metadata['url'] # type: ignore @@ -340,13 +300,12 @@ def add_data_flow( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugPackage') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -366,7 +325,7 @@ def delete( self, resource_group_name, # type: str factory_name, # type: str - session_id=None, # type: Optional[str] + request, # type: "models.DeleteDataFlowDebugSessionRequest" **kwargs # type: Any ): # type: (...) -> None @@ -376,20 +335,21 @@ def delete( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str + :param request: Data flow debug session definition for deletion. 
+ :type request: ~data_factory_management_client.models.DeleteDataFlowDebugSessionRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.DeleteDataFlowDebugSessionRequest(session_id=session_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -407,12 +367,12 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DeleteDataFlowDebugSessionRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -429,19 +389,18 @@ def _execute_command_initial( self, resource_group_name, # type: str factory_name, # type: str - session_id=None, # type: Optional[str] - command=None, # type: Optional[Union[str, "models.DataFlowDebugCommandType"]] - command_payload=None, # type: Optional["models.DataFlowDebugCommandPayload"] + request, # type: "models.DataFlowDebugCommandRequest" **kwargs # type: Any ): # type: (...) 
-> Optional["models.DataFlowDebugCommandResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DataFlowDebugCommandResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - request = models.DataFlowDebugCommandRequest(session_id=session_id, command=command, command_payload=command_payload) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self._execute_command_initial.metadata['url'] # type: ignore @@ -459,13 +418,12 @@ def _execute_command_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(request, 'DataFlowDebugCommandRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -491,9 +449,7 @@ def begin_execute_command( self, resource_group_name, # type: str factory_name, # type: str - session_id=None, # type: Optional[str] - command=None, # type: Optional[Union[str, "models.DataFlowDebugCommandType"]] - command_payload=None, # type: Optional["models.DataFlowDebugCommandPayload"] + request, # type: "models.DataFlowDebugCommandRequest" **kwargs # type: Any ): # type: (...) -> LROPoller["models.DataFlowDebugCommandResponse"] @@ -503,12 +459,8 @@ def begin_execute_command( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param session_id: The ID of data flow debug session. - :type session_id: str - :param command: The command type. - :type command: str or ~data_factory_management_client.models.DataFlowDebugCommandType - :param command_payload: The command payload object. - :type command_payload: ~data_factory_management_client.models.DataFlowDebugCommandPayload + :param request: Data flow debug command definition. + :type request: ~data_factory_management_client.models.DataFlowDebugCommandRequest :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
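A sketch of the data flow debug session operations after the change above: callers now pass the typed request models (CreateDataFlowDebugSessionRequest, DataFlowDebugCommandRequest, and so on) instead of flattened keyword arguments. The client instance and the operation-group attribute name are assumptions.

from azext_datafactory.vendored_sdks.datafactory import models  # assumed path

# `client` is assumed to be an existing DataFactoryManagementClient instance.
create_request = models.CreateDataFlowDebugSessionRequest(
    compute_type="General",
    core_count=8,
    time_to_live=60,
)
poller = client.data_flow_debug_session.begin_create(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    request=create_request,
)
session = poller.result()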
:keyword polling: True for ARMPolling, False for no polling, or a @@ -530,9 +482,7 @@ def begin_execute_command( raw_result = self._execute_command_initial( resource_group_name=resource_group_name, factory_name=factory_name, - session_id=session_id, - command=command, - command_payload=command_payload, + request=request, cls=lambda x,y,z: x, **kwargs ) @@ -547,7 +497,13 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py index e0bd3be1783..41292015b17 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flow_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_data_flows_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -23,8 +23,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class DataFlowOperations(object): - """DataFlowOperations operations. +class DataFlowsOperations(object): + """DataFlowsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -50,7 +50,7 @@ def create_or_update( resource_group_name, # type: str factory_name, # type: str data_flow_name, # type: str - properties, # type: "models.DataFlow" + data_flow, # type: "models.DataFlowResource" if_match=None, # type: Optional[str] **kwargs # type: Any ): @@ -63,8 +63,8 @@ def create_or_update( :type factory_name: str :param data_flow_name: The data flow name. :type data_flow_name: str - :param properties: Data flow properties. - :type properties: ~data_factory_management_client.models.DataFlow + :param data_flow: Data flow resource definition. + :type data_flow: ~data_factory_management_client.models.DataFlowResource :param if_match: ETag of the data flow entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -74,12 +74,13 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - data_flow = models.DataFlowResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -100,13 +101,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(data_flow, 'DataFlowResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -148,9 +148,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -170,7 +173,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -210,9 +213,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -230,6 +236,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -263,14 +270,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DataFlowListResponse"] - error_map = {404: ResourceNotFoundError, 
409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py index 2f866416c74..3ad92c858c9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_dataset_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_datasets_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -23,8 +23,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class DatasetOperations(object): - """DatasetOperations operations. +class DatasetsOperations(object): + """DatasetsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -64,14 +64,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -122,7 +125,7 @@ def create_or_update( resource_group_name, # type: str factory_name, # type: str dataset_name, # type: str - properties, # type: "models.Dataset" + dataset, # type: "models.DatasetResource" if_match=None, # type: Optional[str] **kwargs # type: Any ): @@ -135,8 +138,8 @@ def create_or_update( :type factory_name: str :param dataset_name: The dataset name. :type dataset_name: str - :param properties: Dataset properties. - :type properties: ~data_factory_management_client.models.Dataset + :param dataset: Dataset resource definition. + :type dataset: ~data_factory_management_client.models.DatasetResource :param if_match: ETag of the dataset entity. 
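A sketch of the new create_or_update shape for datasets (data flows follow the same pattern with DataFlowResource): the full resource wrapper is passed instead of a bare properties object. AzureBlobDataset and the reference names here are illustrative assumptions.

from azext_datafactory.vendored_sdks.datafactory import models  # assumed path

# `client` is assumed to be an existing DataFactoryManagementClient instance.
dataset_resource = models.DatasetResource(
    properties=models.AzureBlobDataset(
        linked_service_name=models.LinkedServiceReference(
            reference_name="exampleLinkedService",
            type="LinkedServiceReference",
        ),
        folder_path="container/path",
    )
)
client.datasets.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    dataset_name="exampleDataset",
    dataset=dataset_resource,  # renamed from the old flattened `properties`
)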
Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -146,12 +149,13 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.DatasetResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - dataset = models.DatasetResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -172,13 +176,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(dataset, 'DatasetResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -220,9 +223,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.DatasetResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -242,7 +248,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -284,9 +290,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -304,6 +313,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py 
b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py index d2667ffac81..b419a713e9f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_exposure_control_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -17,7 +17,7 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] @@ -47,8 +47,7 @@ def __init__(self, client, config, serializer, deserializer): def get_feature_value( self, location_id, # type: str - feature_name=None, # type: Optional[str] - feature_type=None, # type: Optional[str] + exposure_control_request, # type: "models.ExposureControlRequest" **kwargs # type: Any ): # type: (...) -> "models.ExposureControlResponse" @@ -56,22 +55,21 @@ def get_feature_value( :param location_id: The location identifier. :type location_id: str - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str + :param exposure_control_request: The exposure control request. 
+ :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ExposureControlResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_feature_value.metadata['url'] # type: ignore @@ -88,13 +86,12 @@ def get_feature_value( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -114,8 +111,7 @@ def get_feature_value_by_factory( self, resource_group_name, # type: str factory_name, # type: str - feature_name=None, # type: Optional[str] - feature_type=None, # type: Optional[str] + exposure_control_request, # type: "models.ExposureControlRequest" **kwargs # type: Any ): # type: (...) -> "models.ExposureControlResponse" @@ -125,22 +121,21 @@ def get_feature_value_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param feature_name: The feature name. - :type feature_name: str - :param feature_type: The feature type. - :type feature_type: str + :param exposure_control_request: The exposure control request. 
+ :type exposure_control_request: ~data_factory_management_client.models.ExposureControlRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ExposureControlResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - exposure_control_request = models.ExposureControlRequest(feature_name=feature_name, feature_type=feature_type) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_feature_value_by_factory.metadata['url'] # type: ignore @@ -158,13 +153,12 @@ def get_feature_value_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_request, 'ExposureControlRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -180,11 +174,11 @@ def get_feature_value_by_factory( return deserialized get_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/getFeatureValue'} # type: ignore - def query_feature_value_by_factory( + def query_feature_values_by_factory( self, resource_group_name, # type: str factory_name, # type: str - exposure_control_requests, # type: List["models.ExposureControlRequest"] + exposure_control_batch_request, # type: "models.ExposureControlBatchRequest" **kwargs # type: Any ): # type: (...) -> "models.ExposureControlBatchResponse" @@ -194,23 +188,24 @@ def query_feature_value_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param exposure_control_requests: List of exposure control features. - :type exposure_control_requests: list[~data_factory_management_client.models.ExposureControlRequest] + :param exposure_control_batch_request: The exposure control request for list of features. 
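The batch variant, renamed to query_feature_values_by_factory, takes a prebuilt ExposureControlBatchRequest in the same way. A sketch under the same placeholder assumptions:

    from azext_datafactory.vendored_sdks.datafactory import models

    batch = models.ExposureControlBatchRequest(
        exposure_control_requests=[
            models.ExposureControlRequest(feature_name="exampleFeatureName", feature_type="Feature"),
        ]
    )
    exposure_ops.query_feature_values_by_factory("exampleResourceGroup", "exampleFactoryName", batch)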
+ :type exposure_control_batch_request: ~data_factory_management_client.models.ExposureControlBatchRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: ExposureControlBatchResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.ExposureControlBatchResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ExposureControlBatchResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - exposure_control_batch_request = models.ExposureControlBatchRequest(exposure_control_requests=exposure_control_requests) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.query_feature_value_by_factory.metadata['url'] # type: ignore + url = self.query_feature_values_by_factory.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -225,13 +220,12 @@ def query_feature_value_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(exposure_control_batch_request, 'ExposureControlBatchRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -245,4 +239,4 @@ def query_feature_value_by_factory( return cls(pipeline_response, deserialized, {}) return deserialized - query_feature_value_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore + query_feature_values_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/queryFeaturesValue'} # type: ignore diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py similarity index 83% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py index 5b8622e97f9..29d7d4af8a9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factory_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_factories_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, 
map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -23,8 +23,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class FactoryOperations(object): - """FactoryOperations operations. +class FactoriesOperations(object): + """FactoriesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -58,14 +58,17 @@ def list( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -112,8 +115,7 @@ def get_next(next_link=None): def configure_factory_repo( self, location_id, # type: str - factory_resource_id=None, # type: Optional[str] - repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"] + factory_repo_update, # type: "models.FactoryRepoUpdate" **kwargs # type: Any ): # type: (...) -> "models.Factory" @@ -121,22 +123,21 @@ def configure_factory_repo( :param location_id: The location identifier. :type location_id: str - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration + :param factory_repo_update: Update factory repo request definition. 
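configure_factory_repo on the renamed FactoriesOperations class now takes the FactoryRepoUpdate envelope directly rather than factory_resource_id/repo_configuration keywords. A minimal sketch, with `factories_ops` standing in for an already-constructed FactoriesOperations instance and placeholder values throughout:

    from azext_datafactory.vendored_sdks.datafactory import models

    repo_update = models.FactoryRepoUpdate(
        factory_resource_id="/subscriptions/.../factories/exampleFactoryName",  # placeholder resource id
        repo_configuration=None,  # or a concrete FactoryRepoConfiguration subtype
    )
    factories_ops.configure_factory_repo("East US", repo_update)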
+ :type factory_repo_update: ~data_factory_management_client.models.FactoryRepoUpdate :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) :rtype: ~data_factory_management_client.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - factory_repo_update = models.FactoryRepoUpdate(factory_resource_id=factory_resource_id, repo_configuration=repo_configuration) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.configure_factory_repo.metadata['url'] # type: ignore @@ -153,13 +154,12 @@ def configure_factory_repo( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory_repo_update, 'FactoryRepoUpdate') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -191,14 +191,17 @@ def list_by_resource_group( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.FactoryListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -247,12 +250,8 @@ def create_or_update( self, resource_group_name, # type: str factory_name, # type: str + factory, # type: "models.Factory" if_match=None, # type: Optional[str] - location=None, # type: Optional[str] - tags=None, # type: Optional[Dict[str, str]] - identity=None, # type: Optional["models.FactoryIdentity"] - repo_configuration=None, # type: Optional["models.FactoryRepoConfiguration"] - global_parameters=None, # type: Optional[Dict[str, "models.GlobalParameterSpecification"]] **kwargs # type: Any ): # type: (...) -> "models.Factory" @@ -262,31 +261,24 @@ def create_or_update( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str + :param factory: Factory resource definition. + :type factory: ~data_factory_management_client.models.Factory :param if_match: ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str - :param location: The resource location. - :type location: str - :param tags: The resource tags. 
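create_or_update now expects the full Factory resource definition in place of the separate location/tags/identity/repo_configuration/global_parameters keywords. A minimal sketch (resource names are placeholders, factories_ops as above):

    from azext_datafactory.vendored_sdks.datafactory import models

    factory = models.Factory(
        location="East US",
        tags={"exampleTag": "exampleValue"},
    )
    factories_ops.create_or_update("exampleResourceGroup", "exampleFactoryName", factory)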
- :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~data_factory_management_client.models.FactoryRepoConfiguration - :param global_parameters: List of parameters for factory. - :type global_parameters: dict[str, ~data_factory_management_client.models.GlobalParameterSpecification] :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) :rtype: ~data_factory_management_client.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - factory = models.Factory(location=location, tags=tags, identity=identity, repo_configuration=repo_configuration, global_parameters=global_parameters) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -306,13 +298,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory, 'Factory') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -332,8 +323,7 @@ def update( self, resource_group_name, # type: str factory_name, # type: str - tags=None, # type: Optional[Dict[str, str]] - identity=None, # type: Optional["models.FactoryIdentity"] + factory_update_parameters, # type: "models.FactoryUpdateParameters" **kwargs # type: Any ): # type: (...) -> "models.Factory" @@ -343,22 +333,21 @@ def update( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param tags: The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~data_factory_management_client.models.FactoryIdentity + :param factory_update_parameters: The parameters for updating a factory. 
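update follows the same pattern, with FactoryUpdateParameters replacing the tags/identity keywords; a sketch under the same assumptions:

    from azext_datafactory.vendored_sdks.datafactory import models

    params = models.FactoryUpdateParameters(tags={"exampleTag": "exampleValue"})
    factories_ops.update("exampleResourceGroup", "exampleFactoryName", params)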
+ :type factory_update_parameters: ~data_factory_management_client.models.FactoryUpdateParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: Factory, or the result of cls(response) :rtype: ~data_factory_management_client.models.Factory :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.Factory"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - factory_update_parameters = models.FactoryUpdateParameters(tags=tags, identity=identity) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -376,13 +365,12 @@ def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(factory_update_parameters, 'FactoryUpdateParameters') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -421,9 +409,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.Factory"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -442,7 +433,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -481,9 +472,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -500,6 +494,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -518,9 +513,7 @@ def get_git_hub_access_token( self, resource_group_name, # type: str factory_name, # type: str - 
git_hub_access_code, # type: str - git_hub_access_token_base_url, # type: str - git_hub_client_id=None, # type: Optional[str] + git_hub_access_token_request, # type: "models.GitHubAccessTokenRequest" **kwargs # type: Any ): # type: (...) -> "models.GitHubAccessTokenResponse" @@ -530,24 +523,21 @@ def get_git_hub_access_token( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param git_hub_access_code: GitHub access code. - :type git_hub_access_code: str - :param git_hub_access_token_base_url: GitHub access token base URL. - :type git_hub_access_token_base_url: str - :param git_hub_client_id: GitHub application client ID. - :type git_hub_client_id: str + :param git_hub_access_token_request: Get GitHub access token request definition. + :type git_hub_access_token_request: ~data_factory_management_client.models.GitHubAccessTokenRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: GitHubAccessTokenResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.GitHubAccessTokenResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.GitHubAccessTokenResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - git_hub_access_token_request = models.GitHubAccessTokenRequest(git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_git_hub_access_token.metadata['url'] # type: ignore @@ -565,13 +555,12 @@ def get_git_hub_access_token( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(git_hub_access_token_request, 'GitHubAccessTokenRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -591,11 +580,7 @@ def get_data_plane_access( self, resource_group_name, # type: str factory_name, # type: str - permissions=None, # type: Optional[str] - access_resource_path=None, # type: Optional[str] - profile_name=None, # type: Optional[str] - start_time=None, # type: Optional[str] - expire_time=None, # type: Optional[str] + policy, # type: "models.UserAccessPolicy" **kwargs # type: Any ): # type: (...) -> "models.AccessPolicyResponse" @@ -605,32 +590,21 @@ def get_data_plane_access( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param permissions: The string with permissions for Data Plane access. Currently only 'r' is - supported which grants read only access. - :type permissions: str - :param access_resource_path: The resource path to get access relative to factory. 
Currently - only empty string is supported which corresponds to the factory resource. - :type access_resource_path: str - :param profile_name: The name of the profile. Currently only the default is supported. The - default value is DefaultProfile. - :type profile_name: str - :param start_time: Start time for the token. If not specified the current time will be used. - :type start_time: str - :param expire_time: Expiration time for the token. Maximum duration for the token is eight - hours and by default the token will expire in eight hours. - :type expire_time: str + :param policy: Data Plane user access policy definition. + :type policy: ~data_factory_management_client.models.UserAccessPolicy :keyword callable cls: A custom type or function that will be passed the direct response :return: AccessPolicyResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.AccessPolicyResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.AccessPolicyResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - policy = models.UserAccessPolicy(permissions=permissions, access_resource_path=access_resource_path, profile_name=profile_name, start_time=start_time, expire_time=expire_time) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get_data_plane_access.metadata['url'] # type: ignore @@ -648,13 +622,12 @@ def get_data_plane_access( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(policy, 'UserAccessPolicy') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py similarity index 90% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py index a7903633080..c9623854aa9 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_node_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_nodes_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import 
HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -22,8 +22,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class IntegrationRuntimeNodeOperations(object): - """IntegrationRuntimeNodeOperations operations. +class IntegrationRuntimeNodesOperations(object): + """IntegrationRuntimeNodesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -69,9 +69,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -90,7 +93,7 @@ def get( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -133,9 +136,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -154,6 +160,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -174,7 +181,7 @@ def update( factory_name, # type: str integration_runtime_name, # type: str node_name, # type: str - concurrent_jobs_limit=None, # type: Optional[int] + update_integration_runtime_node_request, # type: "models.UpdateIntegrationRuntimeNodeRequest" **kwargs # type: Any ): # type: (...) -> "models.SelfHostedIntegrationRuntimeNode" @@ -188,21 +195,22 @@ def update( :type integration_runtime_name: str :param node_name: The integration runtime node name. :type node_name: str - :param concurrent_jobs_limit: The number of concurrent jobs permitted to run on the integration - runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed. - :type concurrent_jobs_limit: int + :param update_integration_runtime_node_request: The parameters for updating an integration + runtime node. 
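IntegrationRuntimeNodesOperations.update now takes an UpdateIntegrationRuntimeNodeRequest instead of a bare concurrent_jobs_limit. A minimal sketch, with `nodes_ops` a hypothetical instance of the renamed class and placeholder resource names:

    from azext_datafactory.vendored_sdks.datafactory import models

    node_update = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=2)
    nodes_ops.update("exampleResourceGroup", "exampleFactoryName",
                     "exampleIntegrationRuntime", "Node_1", node_update)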
+ :type update_integration_runtime_node_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeNodeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SelfHostedIntegrationRuntimeNode, or the result of cls(response) :rtype: ~data_factory_management_client.models.SelfHostedIntegrationRuntimeNode :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SelfHostedIntegrationRuntimeNode"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_node_request = models.UpdateIntegrationRuntimeNodeRequest(concurrent_jobs_limit=concurrent_jobs_limit) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -222,13 +230,12 @@ def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(update_integration_runtime_node_request, 'UpdateIntegrationRuntimeNodeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -269,9 +276,12 @@ def get_ip_address( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeNodeIpAddress"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_ip_address.metadata['url'] # type: ignore @@ -290,7 +300,7 @@ def get_ip_address( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py index 461ab7b6539..a04018b467e 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_object_metadata_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, 
ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod @@ -55,9 +55,12 @@ def _refresh_initial( ): # type: (...) -> Optional["models.SsisObjectMetadataStatusResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.SsisObjectMetadataStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._refresh_initial.metadata['url'] # type: ignore @@ -75,7 +78,7 @@ def _refresh_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -147,7 +150,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -166,7 +176,7 @@ def get( resource_group_name, # type: str factory_name, # type: str integration_runtime_name, # type: str - metadata_path=None, # type: Optional[str] + get_metadata_request=None, # type: Optional["models.GetSsisObjectMetadataRequest"] **kwargs # type: Any ): # type: (...) -> "models.SsisObjectMetadataListResponse" @@ -179,20 +189,21 @@ def get( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param metadata_path: Metadata path. - :type metadata_path: str + :param get_metadata_request: The parameters for getting a SSIS object metadata. 
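The SSIS object-metadata get call likewise switches from a metadata_path string to an optional GetSsisObjectMetadataRequest. A sketch, with `object_metadata_ops` a hypothetical handle to this operations group:

    from azext_datafactory.vendored_sdks.datafactory import models

    metadata_request = models.GetSsisObjectMetadataRequest(metadata_path="exampleFolder")  # placeholder path
    object_metadata_ops.get("exampleResourceGroup", "exampleFactoryName",
                            "exampleIntegrationRuntime", get_metadata_request=metadata_request)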
+ :type get_metadata_request: ~data_factory_management_client.models.GetSsisObjectMetadataRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: SsisObjectMetadataListResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.SsisObjectMetadataListResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.SsisObjectMetadataListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - get_metadata_request = models.GetSsisObjectMetadataRequest(metadata_path=metadata_path) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -211,7 +222,7 @@ def get( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if get_metadata_request is not None: @@ -220,7 +231,6 @@ def get( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py similarity index 88% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py index 1fb5fc6b30d..d0a57313403 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtimes_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -25,8 +25,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class IntegrationRuntimeOperations(object): - """IntegrationRuntimeOperations operations. +class IntegrationRuntimesOperations(object): + """IntegrationRuntimesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. 
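In the hunks that follow, IntegrationRuntimesOperations.create_or_update is reshaped the same way: it now accepts the wrapped IntegrationRuntimeResource rather than a raw properties object. A sketch of that pattern, where `runtimes_ops` is a hypothetical instance and SelfHostedIntegrationRuntime is assumed here as one of the IntegrationRuntime subtypes:

    from azext_datafactory.vendored_sdks.datafactory import models

    resource = models.IntegrationRuntimeResource(
        properties=models.SelfHostedIntegrationRuntime(),  # assumed subtype; any IntegrationRuntime properties model fits
    )
    runtimes_ops.create_or_update("exampleResourceGroup", "exampleFactoryName",
                                  "exampleIntegrationRuntime", resource)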
@@ -66,14 +66,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -124,7 +127,7 @@ def create_or_update( resource_group_name, # type: str factory_name, # type: str integration_runtime_name, # type: str - properties, # type: "models.IntegrationRuntime" + integration_runtime, # type: "models.IntegrationRuntimeResource" if_match=None, # type: Optional[str] **kwargs # type: Any ): @@ -137,8 +140,8 @@ def create_or_update( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param properties: Integration runtime properties. - :type properties: ~data_factory_management_client.models.IntegrationRuntime + :param integration_runtime: Integration runtime resource definition. + :type integration_runtime: ~data_factory_management_client.models.IntegrationRuntimeResource :param if_match: ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -148,12 +151,13 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - integration_runtime = models.IntegrationRuntimeResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -174,13 +178,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(integration_runtime, 'IntegrationRuntimeResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -223,9 +226,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', 
{})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -245,7 +251,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -270,8 +276,7 @@ def update( resource_group_name, # type: str factory_name, # type: str integration_runtime_name, # type: str - auto_update=None, # type: Optional[Union[str, "models.IntegrationRuntimeAutoUpdate"]] - update_delay_offset=None, # type: Optional[str] + update_integration_runtime_request, # type: "models.UpdateIntegrationRuntimeRequest" **kwargs # type: Any ): # type: (...) -> "models.IntegrationRuntimeResource" @@ -283,24 +288,21 @@ def update( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param auto_update: Enables or disables the auto-update feature of the self-hosted integration - runtime. See https://go.microsoft.com/fwlink/?linkid=854189. - :type auto_update: str or ~data_factory_management_client.models.IntegrationRuntimeAutoUpdate - :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The - integration runtime auto update will happen on that time. - :type update_delay_offset: str + :param update_integration_runtime_request: The parameters for updating an integration runtime. + :type update_integration_runtime_request: ~data_factory_management_client.models.UpdateIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeResource, or the result of cls(response) :rtype: ~data_factory_management_client.models.IntegrationRuntimeResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.update.metadata['url'] # type: ignore @@ -319,13 +321,12 @@ def update( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -363,9 +364,12 @@ def delete( 
:raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -383,6 +387,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -419,9 +424,12 @@ def get_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_status.metadata['url'] # type: ignore @@ -439,7 +447,7 @@ def get_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -480,9 +488,12 @@ def get_connection_info( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeConnectionInfo"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_connection_info.metadata['url'] # type: ignore @@ -500,7 +511,7 @@ def get_connection_info( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -523,7 +534,7 @@ def regenerate_auth_key( resource_group_name, # type: str factory_name, # type: str integration_runtime_name, # type: str - key_name=None, # type: Optional[Union[str, "models.IntegrationRuntimeAuthKeyName"]] + regenerate_key_parameters, # type: "models.IntegrationRuntimeRegenerateKeyParameters" **kwargs # type: Any ): # type: (...) -> "models.IntegrationRuntimeAuthKeys" @@ -535,20 +546,22 @@ def regenerate_auth_key( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param key_name: The name of the authentication key to regenerate. - :type key_name: str or ~data_factory_management_client.models.IntegrationRuntimeAuthKeyName + :param regenerate_key_parameters: The parameters for regenerating integration runtime + authentication key. 
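update and regenerate_auth_key on IntegrationRuntimesOperations follow the same model-first convention. A sketch with runtimes_ops as above; the auto-update and key-name strings are assumed example values:

    from azext_datafactory.vendored_sdks.datafactory import models

    runtimes_ops.update(
        "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
        models.UpdateIntegrationRuntimeRequest(auto_update="Off", update_delay_offset="PT03H"),
    )
    runtimes_ops.regenerate_auth_key(
        "exampleResourceGroup", "exampleFactoryName", "exampleIntegrationRuntime",
        models.IntegrationRuntimeRegenerateKeyParameters(key_name="authKey2"),  # assumed key name value
    )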
+ :type regenerate_key_parameters: ~data_factory_management_client.models.IntegrationRuntimeRegenerateKeyParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeAuthKeys, or the result of cls(response) :rtype: ~data_factory_management_client.models.IntegrationRuntimeAuthKeys :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - regenerate_key_parameters = models.IntegrationRuntimeRegenerateKeyParameters(key_name=key_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.regenerate_auth_key.metadata['url'] # type: ignore @@ -567,13 +580,12 @@ def regenerate_auth_key( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(regenerate_key_parameters, 'IntegrationRuntimeRegenerateKeyParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -589,7 +601,7 @@ def regenerate_auth_key( return deserialized regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore - def list_auth_key( + def list_auth_keys( self, resource_group_name, # type: str factory_name, # type: str @@ -611,12 +623,15 @@ def list_auth_key( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeAuthKeys"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL - url = self.list_auth_key.metadata['url'] # type: ignore + url = self.list_auth_keys.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -631,7 +646,7 @@ def list_auth_key( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -647,7 +662,7 @@ def list_auth_key( return cls(pipeline_response, deserialized, {}) return deserialized - 
list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore + list_auth_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore def _start_initial( self, @@ -658,9 +673,12 @@ def _start_initial( ): # type: (...) -> Optional["models.IntegrationRuntimeStatusResponse"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.IntegrationRuntimeStatusResponse"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._start_initial.metadata['url'] # type: ignore @@ -678,7 +696,7 @@ def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -750,7 +768,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -773,9 +798,12 @@ def _stop_initial( ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore @@ -793,6 +821,7 @@ def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -856,7 +885,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -895,9 +931,12 @@ def sync_credentials( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.sync_credentials.metadata['url'] # type: ignore @@ -915,6 +954,7 @@ def sync_credentials( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -952,9 +992,12 @@ def get_monitoring_data( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeMonitoringData"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_monitoring_data.metadata['url'] # type: ignore @@ -972,7 +1015,7 @@ def get_monitoring_data( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, 
**kwargs) @@ -1012,9 +1055,12 @@ def upgrade( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.upgrade.metadata['url'] # type: ignore @@ -1032,6 +1078,7 @@ def upgrade( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -1046,12 +1093,12 @@ def upgrade( upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore - def remove_link( + def remove_links( self, resource_group_name, # type: str factory_name, # type: str integration_runtime_name, # type: str - linked_factory_name, # type: str + linked_integration_runtime_request, # type: "models.LinkedIntegrationRuntimeRequest" **kwargs # type: Any ): # type: (...) -> None @@ -1064,23 +1111,25 @@ def remove_link( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param linked_factory_name: The data factory name for linked integration runtime. - :type linked_factory_name: str + :param linked_integration_runtime_request: The data factory name for the linked integration + runtime. 
+ :type linked_integration_runtime_request: ~data_factory_management_client.models.LinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: None, or the result of cls(response) :rtype: None :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL - url = self.remove_link.metadata['url'] # type: ignore + url = self.remove_links.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -1096,12 +1145,12 @@ def remove_link( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -1112,17 +1161,14 @@ def remove_link( if cls: return cls(pipeline_response, None, {}) - remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore + remove_links.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore def create_linked_integration_runtime( self, resource_group_name, # type: str factory_name, # type: str integration_runtime_name, # type: str - name=None, # type: Optional[str] - subscription_id=None, # type: Optional[str] - data_factory_name=None, # type: Optional[str] - data_factory_location=None, # type: Optional[str] + create_linked_integration_runtime_request, # type: "models.CreateLinkedIntegrationRuntimeRequest" **kwargs # type: Any ): # type: (...) -> "models.IntegrationRuntimeStatusResponse" @@ -1134,29 +1180,21 @@ def create_linked_integration_runtime( :type factory_name: str :param integration_runtime_name: The integration runtime name. :type integration_runtime_name: str - :param name: The name of the linked integration runtime. - :type name: str - :param subscription_id: The ID of the subscription that the linked integration runtime belongs - to. - :type subscription_id: str - :param data_factory_name: The name of the data factory that the linked integration runtime - belongs to. 
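For reference, a minimal caller-side sketch of the new request-object signatures for remove_links and create_linked_integration_runtime. This is a hedged illustration, not generated code: it assumes `client` is an already-constructed DataFactoryManagementClient from this vendored SDK, that the renamed operation group is exposed as `client.integration_runtimes`, and that all names and IDs are placeholders.

from azext_datafactory.vendored_sdks.datafactory import models  # assumed vendored import path

# Remove a linked integration runtime by naming the linked factory in a request model.
client.integration_runtimes.remove_links(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleIntegrationRuntime",
    linked_integration_runtime_request=models.LinkedIntegrationRuntimeRequest(
        linked_factory_name="exampleLinkedFactoryName",
    ),
)

# Create a linked integration runtime from a request model instead of flattened arguments.
client.integration_runtimes.create_linked_integration_runtime(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    integration_runtime_name="exampleIntegrationRuntime",
    create_linked_integration_runtime_request=models.CreateLinkedIntegrationRuntimeRequest(
        name="exampleLinkedIntegrationRuntime",
        subscription_id="12345678-1234-1234-1234-12345678abc",
        data_factory_name="exampleLinkedFactoryName",
        data_factory_location="East US",
    ),
)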
- :type data_factory_name: str - :param data_factory_location: The location of the data factory that the linked integration - runtime belongs to. - :type data_factory_location: str + :param create_linked_integration_runtime_request: The linked integration runtime properties. + :type create_linked_integration_runtime_request: ~data_factory_management_client.models.CreateLinkedIntegrationRuntimeRequest :keyword callable cls: A custom type or function that will be passed the direct response :return: IntegrationRuntimeStatusResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.IntegrationRuntimeStatusResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_linked_integration_runtime.metadata['url'] # type: ignore @@ -1175,13 +1213,12 @@ def create_linked_integration_runtime( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py index 7124cb588eb..ffb243da168 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_service_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_linked_services_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -23,8 +23,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class 
LinkedServiceOperations(object): - """LinkedServiceOperations operations. +class LinkedServicesOperations(object): + """LinkedServicesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -64,14 +64,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -122,7 +125,7 @@ def create_or_update( resource_group_name, # type: str factory_name, # type: str linked_service_name, # type: str - properties, # type: "models.LinkedService" + linked_service, # type: "models.LinkedServiceResource" if_match=None, # type: Optional[str] **kwargs # type: Any ): @@ -135,8 +138,8 @@ def create_or_update( :type factory_name: str :param linked_service_name: The linked service name. :type linked_service_name: str - :param properties: Properties of linked service. - :type properties: ~data_factory_management_client.models.LinkedService + :param linked_service: Linked service resource definition. + :type linked_service: ~data_factory_management_client.models.LinkedServiceResource :param if_match: ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
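For reference, a minimal sketch of the reworked create_or_update call, which now takes a full LinkedServiceResource instead of bare properties (assuming `client` and `models` as in the earlier sketch; the Azure Storage linked service and its connection string are placeholder examples):

linked_service = models.LinkedServiceResource(
    # Any LinkedService subtype can serve as properties; AzureStorageLinkedService is an assumed example.
    properties=models.AzureStorageLinkedService(
        connection_string=models.SecureString(value="DefaultEndpointsProtocol=https;AccountName=examplestorageaccount;AccountKey=<placeholder>"),
    ),
)
client.linked_services.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    linked_service_name="exampleLinkedService",
    linked_service=linked_service,
)

ManagedVirtualNetworkResource and TriggerResource follow the same properties-wrapping pattern in the renamed operations further down in this diff.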
:type if_match: str @@ -146,12 +149,13 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.LinkedServiceResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - linked_service = models.LinkedServiceResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -172,13 +176,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(linked_service, 'LinkedServiceResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -221,9 +224,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.LinkedServiceResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -243,7 +249,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -285,9 +291,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -305,6 +314,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py similarity index 90% rename from 
src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py index 29be0bd0e6d..d1c7c89531f 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoint_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_private_endpoints_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -18,13 +18,13 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class ManagedPrivateEndpointOperations(object): - """ManagedPrivateEndpointOperations operations. +class ManagedPrivateEndpointsOperations(object): + """ManagedPrivateEndpointsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -67,14 +67,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -127,11 +130,8 @@ def create_or_update( factory_name, # type: str managed_virtual_network_name, # type: str managed_private_endpoint_name, # type: str + managed_private_endpoint, # type: "models.ManagedPrivateEndpointResource" if_match=None, # type: Optional[str] - connection_state=None, # type: Optional["models.ConnectionStateProperties"] - fqdns=None, # type: Optional[List[str]] - group_id=None, # type: Optional[str] - private_link_resource_id=None, # type: Optional[str] **kwargs # type: Any ): # type: (...) -> "models.ManagedPrivateEndpointResource" @@ -145,30 +145,24 @@ def create_or_update( :type managed_virtual_network_name: str :param managed_private_endpoint_name: Managed private endpoint name. :type managed_private_endpoint_name: str + :param managed_private_endpoint: Managed private endpoint resource definition. + :type managed_private_endpoint: ~data_factory_management_client.models.ManagedPrivateEndpointResource :param if_match: ETag of the managed private endpoint entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
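For reference, a minimal sketch of the new call shape, in which the endpoint definition travels as a single ManagedPrivateEndpointResource (assuming `client` and `models` as in the earlier sketches; the resource IDs, FQDN and group ID are placeholders):

endpoint = models.ManagedPrivateEndpointResource(
    group_id="blob",
    private_link_resource_id="/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.Storage/storageAccounts/exampleBlobStorage",
    fqdns=["exampleBlobStorage.blob.core.windows.net"],
)
client.managed_private_endpoints.create_or_update(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    managed_virtual_network_name="exampleManagedVirtualNetworkName",
    managed_private_endpoint_name="exampleManagedPrivateEndpointName",
    managed_private_endpoint=endpoint,
)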
:type if_match: str - :param connection_state: The managed private endpoint connection state. - :type connection_state: ~data_factory_management_client.models.ConnectionStateProperties - :param fqdns: Fully qualified domain names. - :type fqdns: list[str] - :param group_id: The groupId to which the managed private endpoint is created. - :type group_id: str - :param private_link_resource_id: The ARM resource ID of the resource to which the managed - private endpoint is created. - :type private_link_resource_id: str :keyword callable cls: A custom type or function that will be passed the direct response :return: ManagedPrivateEndpointResource, or the result of cls(response) :rtype: ~data_factory_management_client.models.ManagedPrivateEndpointResource :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - managed_private_endpoint = models.ManagedPrivateEndpointResource(connection_state=connection_state, fqdns=fqdns, group_id=group_id, private_link_resource_id=private_link_resource_id) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -190,13 +184,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(managed_private_endpoint, 'ManagedPrivateEndpointResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -242,9 +235,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedPrivateEndpointResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -265,7 +261,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -308,9 +304,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: 
ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -329,6 +328,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py similarity index 91% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py index fa043ca3e59..8f81cdf0c80 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_network_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_managed_virtual_networks_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -23,8 +23,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class ManagedVirtualNetworkOperations(object): - """ManagedVirtualNetworkOperations operations. +class ManagedVirtualNetworksOperations(object): + """ManagedVirtualNetworksOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -64,14 +64,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -122,7 +125,7 @@ def create_or_update( resource_group_name, # type: str factory_name, # type: str managed_virtual_network_name, # type: str - properties, # type: "models.ManagedVirtualNetwork" + managed_virtual_network, # type: "models.ManagedVirtualNetworkResource" if_match=None, # type: Optional[str] **kwargs # type: Any ): @@ -135,8 +138,8 @@ def create_or_update( :type factory_name: str :param managed_virtual_network_name: Managed virtual network name. 
:type managed_virtual_network_name: str - :param properties: Managed Virtual Network properties. - :type properties: ~data_factory_management_client.models.ManagedVirtualNetwork + :param managed_virtual_network: Managed Virtual Network resource definition. + :type managed_virtual_network: ~data_factory_management_client.models.ManagedVirtualNetworkResource :param if_match: ETag of the managed Virtual Network entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. :type if_match: str @@ -146,12 +149,13 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - managed_virtual_network = models.ManagedVirtualNetworkResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -172,13 +176,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(managed_virtual_network, 'ManagedVirtualNetworkResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -221,9 +224,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.ManagedVirtualNetworkResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -243,7 +249,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py similarity index 90% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py index c5cf3d43f6d..9795a6e8c4e 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operation_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -23,8 +23,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class OperationOperations(object): - """OperationOperations operations. +class Operations(object): + """Operations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -58,14 +58,17 @@ def list( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.OperationListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py similarity index 84% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py index 75634fde5ac..be684c71f0a 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_run_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_runs_operations.py @@ -5,11 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -import datetime from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -18,13 +17,13 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class PipelineRunOperations(object): - """PipelineRunOperations operations. +class PipelineRunsOperations(object): + """PipelineRunsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -49,11 +48,7 @@ def query_by_factory( self, resource_group_name, # type: str factory_name, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] + filter_parameters, # type: "models.RunFilterParameters" **kwargs # type: Any ): # type: (...) -> "models.PipelineRunsQueryResponse" @@ -63,31 +58,21 @@ def query_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] + :param filter_parameters: Parameters to filter the pipeline run. 
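For reference, a minimal sketch of querying pipeline runs with the new RunFilterParameters model, which replaces the former last_updated_after/last_updated_before keyword arguments (assuming `client` and `models` as in the earlier sketches):

import datetime

runs = client.pipeline_runs.query_by_factory(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    filter_parameters=models.RunFilterParameters(
        last_updated_after=datetime.datetime(2018, 6, 16),
        last_updated_before=datetime.datetime(2018, 6, 17),
    ),
)
# The query response exposes the current page of runs as .value.
for run in runs.value:
    print(run.run_id, run.status)

TriggerRunsOperations.query_by_factory, later in this diff, accepts the same RunFilterParameters shape.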
+ :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: PipelineRunsQueryResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.PipelineRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -105,13 +90,12 @@ def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -149,9 +133,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineRun"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -169,7 +156,7 @@ def get( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -213,9 +200,12 @@ def cancel( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.cancel.metadata['url'] # type: ignore @@ -235,6 +225,7 @@ def cancel( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) diff --git 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py similarity index 93% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py index d82f423f2cb..d4a5594d606 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipeline_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_pipelines_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -23,8 +23,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class PipelineOperations(object): - """PipelineOperations operations. +class PipelinesOperations(object): + """PipelinesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -64,14 +64,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -146,10 +149,13 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.PipelineResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -170,13 +176,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(pipeline, 'PipelineResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = 
self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -218,9 +223,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.PipelineResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -240,7 +248,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -282,9 +290,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -302,6 +313,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -358,10 +370,13 @@ def create_run( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.CreateRunResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_run.metadata['url'] # type: ignore @@ -388,7 +403,7 @@ def create_run( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] if parameters is not None: @@ -397,7 +412,6 @@ def create_run( body_content = None body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py similarity index 84% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py 
rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py index 3290d8196ab..ca2b12d4a29 100644 --- a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_run_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_runs_operations.py @@ -5,11 +5,10 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -import datetime from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.mgmt.core.exceptions import ARMErrorFormat @@ -18,13 +17,13 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar + from typing import Any, Callable, Dict, Generic, Optional, TypeVar T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class TriggerRunOperations(object): - """TriggerRunOperations operations. +class TriggerRunsOperations(object): + """TriggerRunsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -70,9 +69,12 @@ def rerun( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.rerun.metadata['url'] # type: ignore @@ -91,6 +93,7 @@ def rerun( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -130,9 +133,12 @@ def cancel( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.cancel.metadata['url'] # type: ignore @@ -151,6 +157,7 @@ def cancel( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -169,11 +176,7 @@ def query_by_factory( self, resource_group_name, # type: str factory_name, # type: str - last_updated_after, # type: datetime.datetime - last_updated_before, # type: datetime.datetime - 
continuation_token_parameter=None, # type: Optional[str] - filters=None, # type: Optional[List["models.RunQueryFilter"]] - order_by=None, # type: Optional[List["models.RunQueryOrderBy"]] + filter_parameters, # type: "models.RunFilterParameters" **kwargs # type: Any ): # type: (...) -> "models.TriggerRunsQueryResponse" @@ -183,31 +186,21 @@ def query_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param last_updated_after: The time at or after which the run event was updated in 'ISO 8601' - format. - :type last_updated_after: ~datetime.datetime - :param last_updated_before: The time at or before which the run event was updated in 'ISO 8601' - format. - :type last_updated_before: ~datetime.datetime - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param filters: List of filters. - :type filters: list[~data_factory_management_client.models.RunQueryFilter] - :param order_by: List of OrderBy option. - :type order_by: list[~data_factory_management_client.models.RunQueryOrderBy] + :param filter_parameters: Parameters to filter the pipeline run. + :type filter_parameters: ~data_factory_management_client.models.RunFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerRunsQueryResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.TriggerRunsQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerRunsQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.RunFilterParameters(continuation_token=continuation_token_parameter, last_updated_after=last_updated_after, last_updated_before=last_updated_before, filters=filters, order_by=order_by) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -225,13 +218,12 @@ def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'RunFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response diff --git a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py similarity index 85% rename from src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py rename to src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py index 142f32f2c31..f85d33b9c68 100644 --- 
a/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py +++ b/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_triggers_operations.py @@ -8,7 +8,7 @@ from typing import TYPE_CHECKING import warnings -from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse @@ -25,8 +25,8 @@ T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] -class TriggerOperations(object): - """TriggerOperations operations. +class TriggersOperations(object): + """TriggersOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. @@ -66,14 +66,17 @@ def list_by_factory( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL @@ -123,8 +126,7 @@ def query_by_factory( self, resource_group_name, # type: str factory_name, # type: str - continuation_token_parameter=None, # type: Optional[str] - parent_trigger_name=None, # type: Optional[str] + filter_parameters, # type: "models.TriggerFilterParameters" **kwargs # type: Any ): # type: (...) -> "models.TriggerQueryResponse" @@ -134,24 +136,21 @@ def query_by_factory( :type resource_group_name: str :param factory_name: The factory name. :type factory_name: str - :param continuation_token_parameter: The continuation token for getting the next page of - results. Null for first page. - :type continuation_token_parameter: str - :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun - triggers. - :type parent_trigger_name: str + :param filter_parameters: Parameters to filter the triggers. 
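For reference, a minimal sketch of the new trigger query call, which takes a TriggerFilterParameters model in place of the former continuation_token_parameter/parent_trigger_name keyword arguments (assuming `client` and `models` as in the earlier sketches; the trigger name is a placeholder):

triggers = client.triggers.query_by_factory(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    filter_parameters=models.TriggerFilterParameters(
        # Both fields are optional; parent_trigger_name lists the child rerun triggers of a TumblingWindowTrigger.
        parent_trigger_name="exampleTumblingWindowTrigger",
    ),
)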
+ :type filter_parameters: ~data_factory_management_client.models.TriggerFilterParameters :keyword callable cls: A custom type or function that will be passed the direct response :return: TriggerQueryResponse, or the result of cls(response) :rtype: ~data_factory_management_client.models.TriggerQueryResponse :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token_parameter, parent_trigger_name=parent_trigger_name) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.query_by_factory.metadata['url'] # type: ignore @@ -169,13 +168,12 @@ def query_by_factory( # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') body_content_kwargs['content'] = body_content request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -196,7 +194,7 @@ def create_or_update( resource_group_name, # type: str factory_name, # type: str trigger_name, # type: str - properties, # type: "models.Trigger" + trigger, # type: "models.TriggerResource" if_match=None, # type: Optional[str] **kwargs # type: Any ): @@ -209,8 +207,8 @@ def create_or_update( :type factory_name: str :param trigger_name: The trigger name. :type trigger_name: str - :param properties: Properties of the trigger. - :type properties: ~data_factory_management_client.models.Trigger + :param trigger: Trigger resource definition. + :type trigger: ~data_factory_management_client.models.TriggerResource :param if_match: ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update. 
:type if_match: str @@ -220,12 +218,13 @@ def create_or_update( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) - - trigger = models.TriggerResource(properties=properties) api_version = "2018-06-01" content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore @@ -246,13 +245,12 @@ def create_or_update( if if_match is not None: header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(trigger, 'TriggerResource') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response @@ -294,9 +292,12 @@ def get( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerResource"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore @@ -316,7 +317,7 @@ def get( header_parameters = {} # type: Dict[str, Any] if if_none_match is not None: header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -358,9 +359,12 @@ def delete( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.delete.metadata['url'] # type: ignore @@ -378,6 +382,7 @@ def delete( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -392,7 +397,7 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore - def _subscribe_to_event_initial( + def _subscribe_to_events_initial( self, resource_group_name, # type: 
str factory_name, # type: str @@ -401,12 +406,15 @@ def _subscribe_to_event_initial( ): # type: (...) -> Optional["models.TriggerSubscriptionOperationStatus"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL - url = self._subscribe_to_event_initial.metadata['url'] # type: ignore + url = self._subscribe_to_events_initial.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -421,7 +429,7 @@ def _subscribe_to_event_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -439,9 +447,9 @@ def _subscribe_to_event_initial( return cls(pipeline_response, deserialized, {}) return deserialized - _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + _subscribe_to_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore - def begin_subscribe_to_event( + def begin_subscribe_to_events( self, resource_group_name, # type: str factory_name, # type: str @@ -475,7 +483,7 @@ def begin_subscribe_to_event( ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: - raw_result = self._subscribe_to_event_initial( + raw_result = self._subscribe_to_events_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -493,7 +501,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -505,7 +520,7 @@ def get_long_running_output(pipeline_response): ) else: return 
LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + begin_subscribe_to_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore def get_event_subscription_status( self, @@ -529,9 +544,12 @@ def get_event_subscription_status( :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self.get_event_subscription_status.metadata['url'] # type: ignore @@ -549,7 +567,7 @@ def get_event_subscription_status( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -567,7 +585,7 @@ def get_event_subscription_status( return deserialized get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore - def _unsubscribe_from_event_initial( + def _unsubscribe_from_events_initial( self, resource_group_name, # type: str factory_name, # type: str @@ -576,12 +594,15 @@ def _unsubscribe_from_event_initial( ): # type: (...) 
-> Optional["models.TriggerSubscriptionOperationStatus"] cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.TriggerSubscriptionOperationStatus"]] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL - url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore + url = self._unsubscribe_from_events_initial.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), @@ -596,7 +617,7 @@ def _unsubscribe_from_event_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 'application/json' + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -614,9 +635,9 @@ def _unsubscribe_from_event_initial( return cls(pipeline_response, deserialized, {}) return deserialized - _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + _unsubscribe_from_events_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore - def begin_unsubscribe_from_event( + def begin_unsubscribe_from_events( self, resource_group_name, # type: str factory_name, # type: str @@ -650,7 +671,7 @@ def begin_unsubscribe_from_event( ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: - raw_result = self._unsubscribe_from_event_initial( + raw_result = self._unsubscribe_from_events_initial( resource_group_name=resource_group_name, factory_name=factory_name, trigger_name=trigger_name, @@ -668,7 +689,14 @@ def get_long_running_output(pipeline_response): return cls(pipeline_response, deserialized, {}) return deserialized - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -680,7 +708,7 @@ def get_long_running_output(pipeline_response): ) else: return LROPoller(self._client, raw_result, get_long_running_output, 
polling_method) - begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + begin_unsubscribe_from_events.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore def _start_initial( self, @@ -691,9 +719,12 @@ def _start_initial( ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._start_initial.metadata['url'] # type: ignore @@ -711,6 +742,7 @@ def _start_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -774,7 +806,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: @@ -797,9 +836,12 @@ def _stop_initial( ): # type: (...) 
-> None cls = kwargs.pop('cls', None) # type: ClsType[None] - error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } error_map.update(kwargs.pop('error_map', {})) api_version = "2018-06-01" + accept = "application/json" # Construct URL url = self._stop_initial.metadata['url'] # type: ignore @@ -817,6 +859,7 @@ def _stop_initial( # Construct headers header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.post(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) @@ -880,7 +923,14 @@ def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + + if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: diff --git a/src/datafactory/report.md b/src/datafactory/report.md index fb50389d775..3b9bbbeb16b 100644 --- a/src/datafactory/report.md +++ b/src/datafactory/report.md @@ -1,161 +1,228 @@ # Azure CLI Module Creation Report -### datafactory activity-run query-by-pipeline-run - -query-by-pipeline-run a datafactory activity-run. 
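The event-trigger subscription calls are exposed as long-running operations (`begin_subscribe_to_events`, `begin_unsubscribe_from_events`) whose pollers are constructed with the subscription, resource-group, factory, and trigger path arguments so that templated polling URLs can be formatted. A minimal usage sketch under the same assumption that `client` is a constructed management client exposing this operations group as `client.triggers`:

```python
# Hedged sketch: `client` is a placeholder, not part of the generated code.
poller = client.triggers.begin_subscribe_to_events(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
)
# Blocks until the LRO finishes; returns a TriggerSubscriptionOperationStatus.
subscription_status = poller.result()

# The current subscription state can also be read directly, without polling.
current = client.triggers.get_event_subscription_status(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
)

# Unsubscribing mirrors the subscribe call.
client.triggers.begin_unsubscribe_from_events(
    resource_group_name="exampleResourceGroup",
    factory_name="exampleFactoryName",
    trigger_name="exampleTrigger",
).result()
```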
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory activity-run|ActivityRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-pipeline-run|QueryByPipelineRun| - -#### Parameters +## EXTENSION +|CLI Extension|Command Groups| +|---------|------------| +|az datafactory|[groups](#CommandGroups) + +## GROUPS +### Command groups in `az datafactory` extension +|CLI Command Group|Group Swagger name|Commands| +|---------|------------|--------| +|az datafactory|Factories|[commands](#CommandsInFactories)| +|az datafactory integration-runtime|IntegrationRuntimes|[commands](#CommandsInIntegrationRuntimes)| +|az datafactory integration-runtime-node|IntegrationRuntimeNodes|[commands](#CommandsInIntegrationRuntimeNodes)| +|az datafactory linked-service|LinkedServices|[commands](#CommandsInLinkedServices)| +|az datafactory dataset|Datasets|[commands](#CommandsInDatasets)| +|az datafactory pipeline|Pipelines|[commands](#CommandsInPipelines)| +|az datafactory pipeline-run|PipelineRuns|[commands](#CommandsInPipelineRuns)| +|az datafactory activity-run|ActivityRuns|[commands](#CommandsInActivityRuns)| +|az datafactory trigger|Triggers|[commands](#CommandsInTriggers)| +|az datafactory trigger-run|TriggerRuns|[commands](#CommandsInTriggerRuns)| + +## COMMANDS +### Commands in `az datafactory` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory list](#FactoriesListByResourceGroup)|ListByResourceGroup|[Parameters](#ParametersFactoriesListByResourceGroup)|[Example](#ExamplesFactoriesListByResourceGroup)| +|[az datafactory list](#FactoriesList)|List|[Parameters](#ParametersFactoriesList)|[Example](#ExamplesFactoriesList)| +|[az datafactory show](#FactoriesGet)|Get|[Parameters](#ParametersFactoriesGet)|[Example](#ExamplesFactoriesGet)| +|[az datafactory create](#FactoriesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersFactoriesCreateOrUpdate#Create)|[Example](#ExamplesFactoriesCreateOrUpdate#Create)| +|[az datafactory update](#FactoriesUpdate)|Update|[Parameters](#ParametersFactoriesUpdate)|[Example](#ExamplesFactoriesUpdate)| +|[az datafactory delete](#FactoriesDelete)|Delete|[Parameters](#ParametersFactoriesDelete)|[Example](#ExamplesFactoriesDelete)| +|[az datafactory configure-factory-repo](#FactoriesConfigureFactoryRepo)|ConfigureFactoryRepo|[Parameters](#ParametersFactoriesConfigureFactoryRepo)|[Example](#ExamplesFactoriesConfigureFactoryRepo)| +|[az datafactory get-data-plane-access](#FactoriesGetDataPlaneAccess)|GetDataPlaneAccess|[Parameters](#ParametersFactoriesGetDataPlaneAccess)|[Example](#ExamplesFactoriesGetDataPlaneAccess)| +|[az datafactory get-git-hub-access-token](#FactoriesGetGitHubAccessToken)|GetGitHubAccessToken|[Parameters](#ParametersFactoriesGetGitHubAccessToken)|[Example](#ExamplesFactoriesGetGitHubAccessToken)| + +### Commands in `az datafactory activity-run` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory activity-run query-by-pipeline-run](#ActivityRunsQueryByPipelineRun)|QueryByPipelineRun|[Parameters](#ParametersActivityRunsQueryByPipelineRun)|[Example](#ExamplesActivityRunsQueryByPipelineRun)| + +### Commands in `az datafactory dataset` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory dataset 
list](#DatasetsListByFactory)|ListByFactory|[Parameters](#ParametersDatasetsListByFactory)|[Example](#ExamplesDatasetsListByFactory)| +|[az datafactory dataset show](#DatasetsGet)|Get|[Parameters](#ParametersDatasetsGet)|[Example](#ExamplesDatasetsGet)| +|[az datafactory dataset create](#DatasetsCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersDatasetsCreateOrUpdate#Create)|[Example](#ExamplesDatasetsCreateOrUpdate#Create)| +|[az datafactory dataset update](#DatasetsCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersDatasetsCreateOrUpdate#Update)|[Example](#ExamplesDatasetsCreateOrUpdate#Update)| +|[az datafactory dataset delete](#DatasetsDelete)|Delete|[Parameters](#ParametersDatasetsDelete)|[Example](#ExamplesDatasetsDelete)| + +### Commands in `az datafactory integration-runtime` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory integration-runtime list](#IntegrationRuntimesListByFactory)|ListByFactory|[Parameters](#ParametersIntegrationRuntimesListByFactory)|[Example](#ExamplesIntegrationRuntimesListByFactory)| +|[az datafactory integration-runtime show](#IntegrationRuntimesGet)|Get|[Parameters](#ParametersIntegrationRuntimesGet)|[Example](#ExamplesIntegrationRuntimesGet)| +|[az datafactory integration-runtime linked-integration-runtime create](#IntegrationRuntimesCreateLinkedIntegrationRuntime)|CreateLinkedIntegrationRuntime|[Parameters](#ParametersIntegrationRuntimesCreateLinkedIntegrationRuntime)|[Example](#ExamplesIntegrationRuntimesCreateLinkedIntegrationRuntime)| +|[az datafactory integration-runtime managed create](#IntegrationRuntimesCreateOrUpdate#Create#Managed)|CreateOrUpdate#Create#Managed|[Parameters](#ParametersIntegrationRuntimesCreateOrUpdate#Create#Managed)|Not Found| +|[az datafactory integration-runtime self-hosted create](#IntegrationRuntimesCreateOrUpdate#Create#SelfHosted)|CreateOrUpdate#Create#SelfHosted|[Parameters](#ParametersIntegrationRuntimesCreateOrUpdate#Create#SelfHosted)|[Example](#ExamplesIntegrationRuntimesCreateOrUpdate#Create#SelfHosted)| +|[az datafactory integration-runtime update](#IntegrationRuntimesUpdate)|Update|[Parameters](#ParametersIntegrationRuntimesUpdate)|[Example](#ExamplesIntegrationRuntimesUpdate)| +|[az datafactory integration-runtime delete](#IntegrationRuntimesDelete)|Delete|[Parameters](#ParametersIntegrationRuntimesDelete)|[Example](#ExamplesIntegrationRuntimesDelete)| +|[az datafactory integration-runtime get-connection-info](#IntegrationRuntimesGetConnectionInfo)|GetConnectionInfo|[Parameters](#ParametersIntegrationRuntimesGetConnectionInfo)|[Example](#ExamplesIntegrationRuntimesGetConnectionInfo)| +|[az datafactory integration-runtime get-monitoring-data](#IntegrationRuntimesGetMonitoringData)|GetMonitoringData|[Parameters](#ParametersIntegrationRuntimesGetMonitoringData)|[Example](#ExamplesIntegrationRuntimesGetMonitoringData)| +|[az datafactory integration-runtime get-status](#IntegrationRuntimesGetStatus)|GetStatus|[Parameters](#ParametersIntegrationRuntimesGetStatus)|[Example](#ExamplesIntegrationRuntimesGetStatus)| +|[az datafactory integration-runtime list-auth-key](#IntegrationRuntimesListAuthKeys)|ListAuthKeys|[Parameters](#ParametersIntegrationRuntimesListAuthKeys)|[Example](#ExamplesIntegrationRuntimesListAuthKeys)| +|[az datafactory integration-runtime 
regenerate-auth-key](#IntegrationRuntimesRegenerateAuthKey)|RegenerateAuthKey|[Parameters](#ParametersIntegrationRuntimesRegenerateAuthKey)|[Example](#ExamplesIntegrationRuntimesRegenerateAuthKey)| +|[az datafactory integration-runtime remove-link](#IntegrationRuntimesRemoveLinks)|RemoveLinks|[Parameters](#ParametersIntegrationRuntimesRemoveLinks)|[Example](#ExamplesIntegrationRuntimesRemoveLinks)| +|[az datafactory integration-runtime start](#IntegrationRuntimesStart)|Start|[Parameters](#ParametersIntegrationRuntimesStart)|[Example](#ExamplesIntegrationRuntimesStart)| +|[az datafactory integration-runtime stop](#IntegrationRuntimesStop)|Stop|[Parameters](#ParametersIntegrationRuntimesStop)|[Example](#ExamplesIntegrationRuntimesStop)| +|[az datafactory integration-runtime sync-credentials](#IntegrationRuntimesSyncCredentials)|SyncCredentials|[Parameters](#ParametersIntegrationRuntimesSyncCredentials)|[Example](#ExamplesIntegrationRuntimesSyncCredentials)| +|[az datafactory integration-runtime upgrade](#IntegrationRuntimesUpgrade)|Upgrade|[Parameters](#ParametersIntegrationRuntimesUpgrade)|[Example](#ExamplesIntegrationRuntimesUpgrade)| + +### Commands in `az datafactory integration-runtime-node` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory integration-runtime-node show](#IntegrationRuntimeNodesGet)|Get|[Parameters](#ParametersIntegrationRuntimeNodesGet)|[Example](#ExamplesIntegrationRuntimeNodesGet)| +|[az datafactory integration-runtime-node update](#IntegrationRuntimeNodesUpdate)|Update|[Parameters](#ParametersIntegrationRuntimeNodesUpdate)|[Example](#ExamplesIntegrationRuntimeNodesUpdate)| +|[az datafactory integration-runtime-node delete](#IntegrationRuntimeNodesDelete)|Delete|[Parameters](#ParametersIntegrationRuntimeNodesDelete)|[Example](#ExamplesIntegrationRuntimeNodesDelete)| +|[az datafactory integration-runtime-node get-ip-address](#IntegrationRuntimeNodesGetIpAddress)|GetIpAddress|[Parameters](#ParametersIntegrationRuntimeNodesGetIpAddress)|[Example](#ExamplesIntegrationRuntimeNodesGetIpAddress)| + +### Commands in `az datafactory linked-service` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory linked-service list](#LinkedServicesListByFactory)|ListByFactory|[Parameters](#ParametersLinkedServicesListByFactory)|[Example](#ExamplesLinkedServicesListByFactory)| +|[az datafactory linked-service show](#LinkedServicesGet)|Get|[Parameters](#ParametersLinkedServicesGet)|[Example](#ExamplesLinkedServicesGet)| +|[az datafactory linked-service create](#LinkedServicesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Create)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Create)| +|[az datafactory linked-service update](#LinkedServicesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersLinkedServicesCreateOrUpdate#Update)|[Example](#ExamplesLinkedServicesCreateOrUpdate#Update)| +|[az datafactory linked-service delete](#LinkedServicesDelete)|Delete|[Parameters](#ParametersLinkedServicesDelete)|[Example](#ExamplesLinkedServicesDelete)| + +### Commands in `az datafactory pipeline` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory pipeline list](#PipelinesListByFactory)|ListByFactory|[Parameters](#ParametersPipelinesListByFactory)|[Example](#ExamplesPipelinesListByFactory)| +|[az datafactory 
pipeline show](#PipelinesGet)|Get|[Parameters](#ParametersPipelinesGet)|[Example](#ExamplesPipelinesGet)| +|[az datafactory pipeline create](#PipelinesCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersPipelinesCreateOrUpdate#Create)|[Example](#ExamplesPipelinesCreateOrUpdate#Create)| +|[az datafactory pipeline update](#PipelinesCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersPipelinesCreateOrUpdate#Update)|[Example](#ExamplesPipelinesCreateOrUpdate#Update)| +|[az datafactory pipeline delete](#PipelinesDelete)|Delete|[Parameters](#ParametersPipelinesDelete)|[Example](#ExamplesPipelinesDelete)| +|[az datafactory pipeline create-run](#PipelinesCreateRun)|CreateRun|[Parameters](#ParametersPipelinesCreateRun)|[Example](#ExamplesPipelinesCreateRun)| + +### Commands in `az datafactory pipeline-run` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory pipeline-run show](#PipelineRunsGet)|Get|[Parameters](#ParametersPipelineRunsGet)|[Example](#ExamplesPipelineRunsGet)| +|[az datafactory pipeline-run cancel](#PipelineRunsCancel)|Cancel|[Parameters](#ParametersPipelineRunsCancel)|[Example](#ExamplesPipelineRunsCancel)| +|[az datafactory pipeline-run query-by-factory](#PipelineRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersPipelineRunsQueryByFactory)|[Example](#ExamplesPipelineRunsQueryByFactory)| + +### Commands in `az datafactory trigger` group +|CLI Command|Operation Swagger name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory trigger list](#TriggersListByFactory)|ListByFactory|[Parameters](#ParametersTriggersListByFactory)|[Example](#ExamplesTriggersListByFactory)| +|[az datafactory trigger show](#TriggersGet)|Get|[Parameters](#ParametersTriggersGet)|[Example](#ExamplesTriggersGet)| +|[az datafactory trigger create](#TriggersCreateOrUpdate#Create)|CreateOrUpdate#Create|[Parameters](#ParametersTriggersCreateOrUpdate#Create)|[Example](#ExamplesTriggersCreateOrUpdate#Create)| +|[az datafactory trigger update](#TriggersCreateOrUpdate#Update)|CreateOrUpdate#Update|[Parameters](#ParametersTriggersCreateOrUpdate#Update)|[Example](#ExamplesTriggersCreateOrUpdate#Update)| +|[az datafactory trigger delete](#TriggersDelete)|Delete|[Parameters](#ParametersTriggersDelete)|[Example](#ExamplesTriggersDelete)| +|[az datafactory trigger get-event-subscription-status](#TriggersGetEventSubscriptionStatus)|GetEventSubscriptionStatus|[Parameters](#ParametersTriggersGetEventSubscriptionStatus)|[Example](#ExamplesTriggersGetEventSubscriptionStatus)| +|[az datafactory trigger query-by-factory](#TriggersQueryByFactory)|QueryByFactory|[Parameters](#ParametersTriggersQueryByFactory)|[Example](#ExamplesTriggersQueryByFactory)| +|[az datafactory trigger start](#TriggersStart)|Start|[Parameters](#ParametersTriggersStart)|[Example](#ExamplesTriggersStart)| +|[az datafactory trigger stop](#TriggersStop)|Stop|[Parameters](#ParametersTriggersStop)|[Example](#ExamplesTriggersStop)| +|[az datafactory trigger subscribe-to-event](#TriggersSubscribeToEvents)|SubscribeToEvents|[Parameters](#ParametersTriggersSubscribeToEvents)|[Example](#ExamplesTriggersSubscribeToEvents)| +|[az datafactory trigger unsubscribe-from-event](#TriggersUnsubscribeFromEvents)|UnsubscribeFromEvents|[Parameters](#ParametersTriggersUnsubscribeFromEvents)|[Example](#ExamplesTriggersUnsubscribeFromEvents)| + +### Commands in `az datafactory trigger-run` group +|CLI Command|Operation Swagger 
name|Parameters|Examples| +|---------|------------|--------|-----------| +|[az datafactory trigger-run cancel](#TriggerRunsCancel)|Cancel|[Parameters](#ParametersTriggerRunsCancel)|[Example](#ExamplesTriggerRunsCancel)| +|[az datafactory trigger-run query-by-factory](#TriggerRunsQueryByFactory)|QueryByFactory|[Parameters](#ParametersTriggerRunsQueryByFactory)|[Example](#ExamplesTriggerRunsQueryByFactory)| +|[az datafactory trigger-run rerun](#TriggerRunsRerun)|Rerun|[Parameters](#ParametersTriggerRunsRerun)|[Example](#ExamplesTriggerRunsRerun)| + + +## COMMAND DETAILS + +### group `az datafactory` +#### Command `az datafactory list` + +##### Example +``` +az datafactory list --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--run-id**|string|The pipeline run identifier.|run_id|runId| -|**--last-updated-after**|date-time|The time at or after which the run event was updated in 'ISO 8601' format.|last_updated_after|lastUpdatedAfter| -|**--last-updated-before**|date-time|The time at or before which the run event was updated in 'ISO 8601' format.|last_updated_before|lastUpdatedBefore| -|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken| -|**--filters**|array|List of filters.|filters|filters| -|**--order-by**|array|List of OrderBy option.|order_by|orderBy| - -### datafactory dataset create - -create a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +#### Command `az datafactory list` -#### Parameters +##### Example +``` +az datafactory list +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--properties**|object|Dataset properties.|properties|properties| -|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory dataset delete +#### Command `az datafactory show` -delete a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| - -#### Parameters +##### Example +``` +az datafactory show --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| - -### datafactory dataset list - -list a datafactory dataset. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| +|**--if-none-match**|string|ETag of the factory entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| +#### Command `az datafactory create` -#### Parameters +##### Example +``` +az datafactory create --location "East US" --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory dataset show - -show a datafactory dataset. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +|**--if-match**|string|ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--location**|string|The resource location.|location|location| +|**--tags**|dictionary|The resource tags.|tags|tags| +|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| +|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| +|**--global-parameters**|dictionary|List of parameters for factory.|global_parameters|globalParameters| +|**--public-network-access**|choice|Whether or not public network access is allowed for the data factory.|public_network_access|publicNetworkAccess| +|**--key-name**|string|The name of the key in Azure Key Vault to use as Customer Managed Key.|key_name|keyName| +|**--vault-base-url**|string|The url of the Azure Key Vault used for CMK.|vault_base_url|vaultBaseUrl| +|**--key-version**|string|The version of the key used for CMK. If not provided, latest version will be used.|key_version|keyVersion| +|**--identity**|object|User assigned identity to use to authenticate to customer's key vault. If not provided Managed Service Identity will be used.|identity|identity| +|**--user-assigned-identities**|dictionary|List of user assigned identities for the factory.|user_assigned_identities|userAssignedIdentities| + +#### Command `az datafactory update` + +##### Example +``` +az datafactory update --name "exampleFactoryName" --tags exampleTag="exampleValue" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--if-none-match**|string|ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory dataset update - -update a datafactory dataset. 
+|**--tags**|dictionary|The resource tags.|tags|tags| +|**--user-assigned-identities**|dictionary|List of user assigned identities for the factory.|user_assigned_identities|userAssignedIdentities| -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory dataset|Datasets| +#### Command `az datafactory delete` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| - -#### Parameters +##### Example +``` +az datafactory delete --name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -|**--linked-service-name**|object|Linked service reference.|linked_service_name|linkedServiceName| -|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Dataset description.|description|description| -|**--structure**|any|Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement.|structure|structure| -|**--schema**|any|Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement.|schema|schema| -|**--parameters**|dictionary|Parameters for dataset.|parameters|parameters| -|**--annotations**|array|List of tags that can be used for describing the Dataset.|annotations|annotations| -|**--folder**|object|The folder that this Dataset is in. If not specified, Dataset will appear at the root level.|folder|folder| - -### datafactory factory configure-factory-repo -configure-factory-repo a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|configure-factory-repo|ConfigureFactoryRepo| +#### Command `az datafactory configure-factory-repo` -#### Parameters +##### Example +``` +az datafactory configure-factory-repo --factory-resource-id "/subscriptions/12345678-1234-1234-1234-12345678abc/resourc\ +eGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName" \ +--factory-vsts-configuration account-name="ADF" collaboration-branch="master" last-commit-id="" project-name="project" \ +repository-name="repo" root-folder="/" tenant-id="" --location "East US" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--location**|string|The location identifier.|location|locationId| @@ -163,67 +230,15 @@ configure-factory-repo a datafactory factory. |**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| |**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| -### datafactory factory create - -create a datafactory factory. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--if-match**|string|ETag of the factory entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--location**|string|The resource location.|location|location| -|**--tags**|dictionary|The resource tags.|tags|tags| -|**--factory-vsts-configuration**|object|Factory's VSTS repo information.|factory_vsts_configuration|FactoryVSTSConfiguration| -|**--factory-git-hub-configuration**|object|Factory's GitHub repo information.|factory_git_hub_configuration|FactoryGitHubConfiguration| -|**--global-parameters**|dictionary|List of parameters for factory.|global_parameters|globalParameters| - -### datafactory factory delete - -delete a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +#### Command `az datafactory get-data-plane-access` -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory factory get-data-plane-access - -get-data-plane-access a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-data-plane-access|GetDataPlaneAccess| - -#### Parameters +##### Example +``` +az datafactory get-data-plane-access --name "exampleFactoryName" --access-resource-path "" --expire-time \ +"2018-11-10T09:46:20.2659347Z" --permissions "r" --profile-name "DefaultProfile" --start-time \ +"2018-11-10T02:46:20.2659347Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -234,21 +249,14 @@ get-data-plane-access a datafactory factory. |**--start-time**|string|Start time for the token. If not specified the current time will be used.|start_time|startTime| |**--expire-time**|string|Expiration time for the token. Maximum duration for the token is eight hours and by default the token will expire in eight hours.|expire_time|expireTime| -### datafactory factory get-git-hub-access-token - -get-git-hub-access-token a datafactory factory. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-git-hub-access-token|GetGitHubAccessToken| +#### Command `az datafactory get-git-hub-access-token` -#### Parameters +##### Example +``` +az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code "some" \ +--git-hub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -257,167 +265,151 @@ get-git-hub-access-token a datafactory factory. |**--git-hub-access-token-base-url**|string|GitHub access token base URL.|git_hub_access_token_base_url|gitHubAccessTokenBaseUrl| |**--git-hub-client-id**|string|GitHub application client ID.|git_hub_client_id|gitHubClientId| -### datafactory factory list +### group `az datafactory activity-run` +#### Command `az datafactory activity-run query-by-pipeline-run` -list a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByResourceGroup| -|list|List| - -#### Parameters +##### Example +``` +az datafactory activity-run query-by-pipeline-run --factory-name "exampleFactoryName" --last-updated-after \ +"2018-06-16T00:36:44.3345758Z" --last-updated-before "2018-06-16T00:49:48.3686473Z" --resource-group \ +"exampleResourceGroup" --run-id "2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--run-id**|string|The pipeline run identifier.|run_id|runId| +|**--last-updated-after**|date-time|The time at or after which the run event was updated in 'ISO 8601' format.|last_updated_after|lastUpdatedAfter| +|**--last-updated-before**|date-time|The time at or before which the run event was updated in 'ISO 8601' format.|last_updated_before|lastUpdatedBefore| +|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken| +|**--filters**|array|List of filters.|filters|filters| +|**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory factory show - -show a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +### group `az datafactory dataset` +#### Command `az datafactory dataset list` -#### Parameters +##### Example +``` +az datafactory dataset list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--if-none-match**|string|ETag of the factory entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory factory update - -update a datafactory factory. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory factory|Factories| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| +#### Command `az datafactory dataset show` -#### Parameters +##### Example +``` +az datafactory dataset show --name "exampleDataset" --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--tags**|dictionary|The resource tags.|tags|tags| - -### datafactory integration-runtime delete - -delete a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| +|**--if-none-match**|string|ETag of the dataset entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Parameters +#### Command `az datafactory dataset create` + +##### Example +``` +az datafactory dataset create --properties "{\\"type\\":\\"AzureBlob\\",\\"linkedServiceName\\":{\\"type\\":\\"LinkedSe\ +rviceReference\\",\\"referenceName\\":\\"exampleLinkedService\\"},\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Str\ +ing\\"},\\"MyFolderPath\\":{\\"type\\":\\"String\\"}},\\"typeProperties\\":{\\"format\\":{\\"type\\":\\"TextFormat\\"},\ +\\"fileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@dataset().MyFileName\\"},\\"folderPath\\":{\\"type\\":\\"Ex\ +pression\\",\\"value\\":\\"@dataset().MyFolderPath\\"}}}" --name "exampleDataset" --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| - -### datafactory integration-runtime get-connection-info - -get-connection-info a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| +|**--properties**|object|Dataset properties.|properties|properties| +|**--if-match**|string|ETag of the dataset entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-connection-info|GetConnectionInfo| +#### Command `az datafactory dataset update` -#### Parameters +##### Example +``` +az datafactory dataset update --description "Example description" --linked-service-name "{\\"type\\":\\"LinkedServiceRe\ +ference\\",\\"referenceName\\":\\"exampleLinkedService\\"}" --parameters "{\\"MyFileName\\":{\\"type\\":\\"String\\"},\ +\\"MyFolderPath\\":{\\"type\\":\\"String\\"}}" --name "exampleDataset" --factory-name "exampleFactoryName" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| - -### datafactory integration-runtime get-monitoring-data - -get-monitoring-data a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| +|**--linked-service-name**|object|Linked service reference.|linked_service_name|linkedServiceName| +|**--if-match**|string|ETag of the dataset entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Dataset description.|description|description| +|**--structure**|any|Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement.|structure|structure| +|**--schema**|any|Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement.|schema|schema| +|**--parameters**|dictionary|Parameters for dataset.|parameters|parameters| +|**--annotations**|array|List of tags that can be used for describing the Dataset.|annotations|annotations| +|**--folder**|object|The folder that this Dataset is in. If not specified, Dataset will appear at the root level.|folder|folder| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-monitoring-data|GetMonitoringData| +#### Command `az datafactory dataset delete` -#### Parameters +##### Example +``` +az datafactory dataset delete --name "exampleDataset" --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| - -### datafactory integration-runtime get-status +|**--dataset-name**|string|The dataset name.|dataset_name|datasetName| -get-status a datafactory integration-runtime. 
+### group `az datafactory integration-runtime` +#### Command `az datafactory integration-runtime list` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +##### Example +``` +az datafactory integration-runtime list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-status|GetStatus| +#### Command `az datafactory integration-runtime show` -#### Parameters +##### Example +``` +az datafactory integration-runtime show --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-none-match**|string|ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory integration-runtime linked-integration-runtime create - -linked-integration-runtime create a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|linked-integration-runtime create|CreateLinkedIntegrationRuntime| +#### Command `az datafactory integration-runtime linked-integration-runtime create` -#### Parameters +##### Example +``` +az datafactory integration-runtime linked-integration-runtime create --name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" \ +--location "West US" --data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" --subscription-id \ +"061774c7-4b5a-4159-a55b-365581830283" --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -428,328 +420,218 @@ linked-integration-runtime create a datafactory integration-runtime. |**--data-factory-name**|string|The name of the data factory that the linked integration runtime belongs to.|data_factory_name|dataFactoryName| |**--location**|string|The location of the data factory that the linked integration runtime belongs to.|location|dataFactoryLocation| -### datafactory integration-runtime list +#### Command `az datafactory integration-runtime managed create` -list a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory integration-runtime list-auth-key - -list-auth-key a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list-auth-key|ListAuthKeys| - -#### Parameters +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Integration runtime description.|managed_description|description| +|**--managed-virtual-network**|object|Managed Virtual Network reference.|managed_managed_virtual_network|managedVirtualNetwork| +|**--compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties| +|**--ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties| + +#### Command `az datafactory integration-runtime self-hosted create` + +##### Example +``` +az datafactory integration-runtime self-hosted create --factory-name "exampleFactoryName" --description "A selfhosted \ +integration runtime" --name "exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Integration runtime description.|self_hosted_description|description| +|**--linked-info**|object|The base definition of a linked integration runtime.|self_hosted_linked_info|linkedInfo| -### datafactory integration-runtime managed create - -managed create a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|managed create|CreateOrUpdate#Create#Managed| +#### Command `az datafactory integration-runtime update` -#### Parameters +##### Example +``` +az datafactory integration-runtime update --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" --auto-update "Off" --update-delay-offset "\\"PT3H\\"" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Integration runtime description.|managed_description|description| -|**--type-properties-compute-properties**|object|The compute resource for managed integration runtime.|managed_compute_properties|computeProperties| -|**--type-properties-ssis-properties**|object|SSIS properties for managed integration runtime.|managed_ssis_properties|ssisProperties| - -### datafactory integration-runtime regenerate-auth-key - -regenerate-auth-key a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| +|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update|autoUpdate| +|**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset|updateDelayOffset| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|regenerate-auth-key|RegenerateAuthKey| +#### Command `az datafactory integration-runtime delete` -#### Parameters +##### Example +``` +az datafactory integration-runtime delete --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--key-name**|choice|The name of the authentication key to regenerate.|key_name|keyName| - -### datafactory integration-runtime remove-link -remove-link a datafactory integration-runtime. 
+#### Command `az datafactory integration-runtime get-connection-info` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|remove-link|RemoveLinks| - -#### Parameters +##### Example +``` +az datafactory integration-runtime get-connection-info --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--linked-factory-name**|string|The data factory name for linked integration runtime.|linked_factory_name|linkedFactoryName| - -### datafactory integration-runtime self-hosted create - -self-hosted create a datafactory integration-runtime. -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|self-hosted create|CreateOrUpdate#Create#SelfHosted| +#### Command `az datafactory integration-runtime get-monitoring-data` -#### Parameters +##### Example +``` +az datafactory integration-runtime get-monitoring-data --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Integration runtime description.|self_hosted_description|description| -|**--type-properties-linked-info**|object|The base definition of a linked integration runtime.|self_hosted_linked_info|linkedInfo| - -### datafactory integration-runtime show - -show a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +#### Command `az datafactory integration-runtime get-status` -#### Parameters +##### Example +``` +az datafactory integration-runtime get-status --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--if-none-match**|string|ETag of the integration runtime entity. 
Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory integration-runtime start -start a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|start|Start| +#### Command `az datafactory integration-runtime list-auth-key` -#### Parameters +##### Example +``` +az datafactory integration-runtime list-auth-key --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime stop +#### Command `az datafactory integration-runtime regenerate-auth-key` -stop a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|stop|Stop| - -#### Parameters +##### Example +``` +az datafactory integration-runtime regenerate-auth-key --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --key-name "authKey2" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--key-name**|choice|The name of the authentication key to regenerate.|key_name|keyName| -### datafactory integration-runtime sync-credentials - -sync-credentials a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|sync-credentials|SyncCredentials| +#### Command `az datafactory integration-runtime remove-link` -#### Parameters +##### Example +``` +az datafactory integration-runtime remove-link --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--linked-factory-name "exampleFactoryName-linked" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--linked-factory-name**|string|The data factory name for linked integration runtime.|linked_factory_name|linkedFactoryName| -### datafactory integration-runtime update - -update a datafactory integration-runtime. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| +#### Command `az datafactory integration-runtime start` -#### Parameters +##### Example +``` +az datafactory integration-runtime start --factory-name "exampleFactoryName" --name "exampleManagedIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--auto-update**|choice|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update|autoUpdate| -|**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset|updateDelayOffset| - -### datafactory integration-runtime upgrade - -upgrade a datafactory integration-runtime. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime|IntegrationRuntimes| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|upgrade|Upgrade| +#### Command `az datafactory integration-runtime stop` -#### Parameters +##### Example +``` +az datafactory integration-runtime stop --factory-name "exampleFactoryName" --name "exampleManagedIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -### datafactory integration-runtime-node delete - -delete a datafactory integration-runtime-node. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +#### Command `az datafactory integration-runtime sync-credentials` -#### Parameters +##### Example +``` +az datafactory integration-runtime sync-credentials --factory-name "exampleFactoryName" --name \ +"exampleIntegrationRuntime" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--node-name**|string|The integration runtime node name.|node_name|nodeName| - -### datafactory integration-runtime-node get-ip-address -get-ip-address a datafactory integration-runtime-node. 
+#### Command `az datafactory integration-runtime upgrade` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-ip-address|GetIpAddress| - -#### Parameters +##### Example +``` +az datafactory integration-runtime upgrade --factory-name "exampleFactoryName" --name "exampleIntegrationRuntime" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| -|**--node-name**|string|The integration runtime node name.|node_name|nodeName| - -### datafactory integration-runtime-node show - -show a datafactory integration-runtime-node. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +### group `az datafactory integration-runtime-node` +#### Command `az datafactory integration-runtime-node show` -#### Parameters +##### Example +``` +az datafactory integration-runtime-node show --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -757,21 +639,14 @@ show a datafactory integration-runtime-node. |**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| |**--node-name**|string|The integration runtime node name.|node_name|nodeName| -### datafactory integration-runtime-node update - -update a datafactory integration-runtime-node. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory integration-runtime-node|IntegrationRuntimeNodes| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|Update| +#### Command `az datafactory integration-runtime-node update` -#### Parameters +##### Example +``` +az datafactory integration-runtime-node update --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" --concurrent-jobs-limit 2 +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -780,85 +655,57 @@ update a datafactory integration-runtime-node. |**--node-name**|string|The integration runtime node name.|node_name|nodeName| |**--concurrent-jobs-limit**|integer|The number of concurrent jobs permitted to run on the integration runtime node. Values between 1 and maxConcurrentJobs(inclusive) are allowed.|concurrent_jobs_limit|concurrentJobsLimit| -### datafactory linked-service create - -create a datafactory linked-service. 
+#### Command `az datafactory integration-runtime-node delete` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| - -#### Parameters +##### Example +``` +az datafactory integration-runtime-node delete --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| -|**--properties**|object|Properties of linked service.|properties|properties| -|**--if-match**|string|ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory linked-service delete - -delete a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--node-name**|string|The integration runtime node name.|node_name|nodeName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +#### Command `az datafactory integration-runtime-node get-ip-address` -#### Parameters +##### Example +``` +az datafactory integration-runtime-node get-ip-address --factory-name "exampleFactoryName" --integration-runtime-name \ +"exampleIntegrationRuntime" --node-name "Node_1" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| - -### datafactory linked-service list - -list a datafactory linked-service. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name|integrationRuntimeName| +|**--node-name**|string|The integration runtime node name.|node_name|nodeName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| +### group `az datafactory linked-service` +#### Command `az datafactory linked-service list` -#### Parameters +##### Example +``` +az datafactory linked-service list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -### datafactory linked-service show +#### Command `az datafactory linked-service show` -show a datafactory linked-service. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +##### Example +``` +az datafactory linked-service show --factory-name "exampleFactoryName" --name "exampleLinkedService" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -866,21 +713,32 @@ show a datafactory linked-service. |**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| |**--if-none-match**|string|ETag of the linked service entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory linked-service update - -update a datafactory linked-service. +#### Command `az datafactory linked-service create` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory linked-service|LinkedServices| +##### Example +``` +az datafactory linked-service create --factory-name "exampleFactoryName" --properties "{\\"type\\":\\"AzureStorage\\",\ +\\"typeProperties\\":{\\"connectionString\\":{\\"type\\":\\"SecureString\\",\\"value\\":\\"DefaultEndpointsProtocol=htt\ +ps;AccountName=examplestorageaccount;AccountKey=\\"}}}" --name "exampleLinkedService" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| +|**--properties**|object|Properties of linked service.|properties|properties| +|**--if-match**|string|ETag of the linkedService entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| +#### Command `az datafactory linked-service update` -#### Parameters +##### Example +``` +az datafactory linked-service update --factory-name "exampleFactoryName" --description "Example description" --name \ +"exampleLinkedService" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -892,133 +750,90 @@ update a datafactory linked-service. |**--parameters**|dictionary|Parameters for linked service.|parameters|parameters| |**--annotations**|array|List of tags that can be used for describing the linked service.|annotations|annotations| -### datafactory pipeline create - -create a datafactory pipeline. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| +#### Command `az datafactory linked-service delete` -#### Parameters +##### Example +``` +az datafactory linked-service delete --factory-name "exampleFactoryName" --name "exampleLinkedService" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--pipeline**|object|Pipeline resource definition.|pipeline|pipeline| -|**--if-match**|string|ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory pipeline create-run - -create-run a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| +|**--linked-service-name**|string|The linked service name.|linked_service_name|linkedServiceName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create-run|CreateRun| +### group `az datafactory pipeline` +#### Command `az datafactory pipeline list` -#### Parameters +##### Example +``` +az datafactory pipeline list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--reference-pipeline-run-id**|string|The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run.|reference_pipeline_run_id|referencePipelineRunId| -|**--is-recovery**|boolean|Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId.|is_recovery|isRecovery| -|**--start-activity-name**|string|In recovery mode, the rerun will start from this activity. If not specified, all activities will run.|start_activity_name|startActivityName| -|**--start-from-failure**|boolean|In recovery mode, if set to true, the rerun will start from failed activities. The property will be used only if startActivityName is not specified.|start_from_failure|startFromFailure| -|**--parameters**|dictionary|Parameters of the pipeline run. These parameters will be used only if the runId is not specified.|parameters|parameters| - -### datafactory pipeline delete - -delete a datafactory pipeline. 
-#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +#### Command `az datafactory pipeline show` -#### Parameters +##### Example +``` +az datafactory pipeline show --factory-name "exampleFactoryName" --name "examplePipeline" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| +|**--if-none-match**|string|ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory pipeline list - -list a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory pipeline show - -show a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +#### Command `az datafactory pipeline create` + +##### Example +``` +az datafactory pipeline create --factory-name "exampleFactoryName" --pipeline "{\\"activities\\":[{\\"name\\":\\"Exampl\ +eForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\":\\"ExampleCopyActivity\ +\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":\\"exampl\ +econtainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\"outputs\\":[\ +{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@item\ +()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\"typeProperties\\":{\\"d\ +ataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"BlobSource\\"}}}],\\"isSeq\ +uential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.OutputBlobNameList\\"}}}\ +],\\"parameters\\":{\\"JobId\\":{\\"type\\":\\"String\\"},\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}},\\"variabl\ +es\\":{\\"TestVariableArray\\":{\\"type\\":\\"Array\\"}},\\"runDimensions\\":{\\"JobId\\":{\\"type\\":\\"Expression\\",\ +\\"value\\":\\"@pipeline().parameters.JobId\\"}},\\"duration\\":\\"0.00:10:00\\"}" --name "examplePipeline" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| 
|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -|**--if-none-match**|string|ETag of the pipeline entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| - -### datafactory pipeline update - -update a datafactory pipeline. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline|Pipelines| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| +|**--pipeline**|object|Pipeline resource definition.|pipeline|pipeline| +|**--if-match**|string|ETag of the pipeline entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -#### Parameters +#### Command `az datafactory pipeline update` + +##### Example +``` +az datafactory pipeline update --factory-name "exampleFactoryName" --description "Example description" --activities \ +"[{\\"name\\":\\"ExampleForeachActivity\\",\\"type\\":\\"ForEach\\",\\"typeProperties\\":{\\"activities\\":[{\\"name\\"\ +:\\"ExampleCopyActivity\\",\\"type\\":\\"Copy\\",\\"inputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"\ +MyFileName\\":\\"examplecontainer.csv\\",\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDatas\ +et\\"}],\\"outputs\\":[{\\"type\\":\\"DatasetReference\\",\\"parameters\\":{\\"MyFileName\\":{\\"type\\":\\"Expression\ +\\",\\"value\\":\\"@item()\\"},\\"MyFolderPath\\":\\"examplecontainer\\"},\\"referenceName\\":\\"exampleDataset\\"}],\\\ +"typeProperties\\":{\\"dataIntegrationUnits\\":32,\\"sink\\":{\\"type\\":\\"BlobSink\\"},\\"source\\":{\\"type\\":\\"Bl\ +obSource\\"}}}],\\"isSequential\\":true,\\"items\\":{\\"type\\":\\"Expression\\",\\"value\\":\\"@pipeline().parameters.\ +OutputBlobNameList\\"}}}]" --parameters "{\\"OutputBlobNameList\\":{\\"type\\":\\"Array\\"}}" --duration "0.00:10:00" \ +--pipeline-name "examplePipeline" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1032,45 +847,81 @@ update a datafactory pipeline. 
|**--concurrency**|integer|The max number of concurrent runs for the pipeline.|concurrency|concurrency| |**--annotations**|array|List of tags that can be used for describing the Pipeline.|annotations|annotations| |**--run-dimensions**|dictionary|Dimensions emitted by Pipeline.|run_dimensions|runDimensions| -|**--folder-name**|string|The name of the folder that this Pipeline is in.|name|name| +|**--duration**|any|TimeSpan value, after which an Azure Monitoring Metric is fired.|duration|duration| +|**--name**|string|The name of the folder that this Pipeline is in.|name|name| + +#### Command `az datafactory pipeline delete` -### datafactory pipeline-run cancel +##### Example +``` +az datafactory pipeline delete --factory-name "exampleFactoryName" --name "examplePipeline" --resource-group \ +"exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| -cancel a datafactory pipeline-run. +#### Command `az datafactory pipeline create-run` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| +##### Example +``` +az datafactory pipeline create-run --factory-name "exampleFactoryName" --parameters "{\\"OutputBlobNameList\\":[\\"exam\ +pleoutput.csv\\"]}" --name "examplePipeline" --resource-group "exampleResourceGroup" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--pipeline-name**|string|The pipeline name.|pipeline_name|pipelineName| +|**--reference-pipeline-run-id**|string|The pipeline run identifier. If run ID is specified the parameters of the specified run will be used to create a new run.|reference_pipeline_run_id|referencePipelineRunId| +|**--is-recovery**|boolean|Recovery mode flag. If recovery mode is set to true, the specified referenced pipeline run and the new run will be grouped under the same groupId.|is_recovery|isRecovery| +|**--start-activity-name**|string|In recovery mode, the rerun will start from this activity. If not specified, all activities will run.|start_activity_name|startActivityName| +|**--start-from-failure**|boolean|In recovery mode, if set to true, the rerun will start from failed activities. The property will be used only if startActivityName is not specified.|start_from_failure|startFromFailure| +|**--parameters**|dictionary|Parameters of the pipeline run. 
These parameters will be used only if the runId is not specified.|parameters|parameters| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|cancel|Cancel| +### group `az datafactory pipeline-run` +#### Command `az datafactory pipeline-run show` -#### Parameters +##### Example +``` +az datafactory pipeline-run show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ +"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--run-id**|string|The pipeline run identifier.|run_id|runId| -|**--is-recursive**|boolean|If true, cancel all the Child pipelines that are triggered by the current pipeline.|is_recursive|isRecursive| - -### datafactory pipeline-run query-by-factory -query-by-factory a datafactory pipeline-run. +#### Command `az datafactory pipeline-run cancel` -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| +##### Example +``` +az datafactory pipeline-run cancel --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--run-id "16ac5348-ff82-4f95-a80d-638c1d47b721" +``` +##### Parameters +|Option|Type|Description|Path (SDK)|Swagger name| +|------|----|-----------|----------|------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| +|**--factory-name**|string|The factory name.|factory_name|factoryName| +|**--run-id**|string|The pipeline run identifier.|run_id|runId| +|**--is-recursive**|boolean|If true, cancel all the Child pipelines that are triggered by the current pipeline.|is_recursive|isRecursive| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| +#### Command `az datafactory pipeline-run query-by-factory` -#### Parameters +##### Example +``` +az datafactory pipeline-run query-by-factory --factory-name "exampleFactoryName" --filters operand="PipelineName" \ +operator="Equals" values="examplePipeline" --last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before \ +"2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1081,279 +932,178 @@ query-by-factory a datafactory pipeline-run. |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory pipeline-run show +### group `az datafactory trigger` +#### Command `az datafactory trigger list` -show a datafactory pipeline-run. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory pipeline-run|PipelineRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| - -#### Parameters +##### Example +``` +az datafactory trigger list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--run-id**|string|The pipeline run identifier.|run_id|runId| -### datafactory trigger create +#### Command `az datafactory trigger show` -create a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|create|CreateOrUpdate#Create| - -#### Parameters +##### Example +``` +az datafactory trigger show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -|**--properties**|object|Properties of the trigger.|properties|properties| -|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| - -### datafactory trigger delete - -delete a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|delete|Delete| +|**--if-none-match**|string|ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -#### Parameters +#### Command `az datafactory trigger create` + +##### Example +``` +az datafactory trigger create --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --properties \ +"{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{\\"parameters\\":{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\\ +"]},\\"pipelineReference\\":{\\"type\\":\\"PipelineReference\\",\\"referenceName\\":\\"examplePipeline\\"}}],\\"typePro\ +perties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\ +\\":4,\\"startTime\\":\\"2018-06-16T00:39:13.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| +|**--properties**|object|Properties of the trigger.|properties|properties| +|**--if-match**|string|ETag of the trigger entity. 
Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -### datafactory trigger get-event-subscription-status - -get-event-subscription-status a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|get-event-subscription-status|GetEventSubscriptionStatus| +#### Command `az datafactory trigger update` -#### Parameters +##### Example +``` +az datafactory trigger update --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--description "Example description" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| +|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| +|**--description**|string|Trigger description.|description|description| +|**--annotations**|array|List of tags that can be used for describing the trigger.|annotations|annotations| -### datafactory trigger list - -list a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|list|ListByFactory| - -#### Parameters -|Option|Type|Description|Path (SDK)|Swagger name| -|------|----|-----------|----------|------------| -|**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| -|**--factory-name**|string|The factory name.|factory_name|factoryName| - -### datafactory trigger query-by-factory - -query-by-factory a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| +#### Command `az datafactory trigger delete` -#### Parameters +##### Example +``` +az datafactory trigger delete --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken| -|**--parent-trigger-name**|string|The name of the parent TumblingWindowTrigger to get the child rerun triggers|parent_trigger_name|parentTriggerName| - -### datafactory trigger show - -show a datafactory trigger. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| +|**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|show|Get| +#### Command `az datafactory trigger get-event-subscription-status` -#### Parameters +##### Example +``` +az datafactory trigger get-event-subscription-status --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -|**--if-none-match**|string|ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match|If-None-Match| -### datafactory trigger start +#### Command `az datafactory trigger query-by-factory` -start a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|start|Start| - -#### Parameters +##### Example +``` +az datafactory trigger query-by-factory --factory-name "exampleFactoryName" --parent-trigger-name "exampleTrigger" \ +--resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| -|**--trigger-name**|string|The trigger name.|trigger_name|triggerName| - -### datafactory trigger stop - -stop a datafactory trigger. +|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token|continuationToken| +|**--parent-trigger-name**|string|The name of the parent TumblingWindowTrigger to get the child rerun triggers|parent_trigger_name|parentTriggerName| -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| +#### Command `az datafactory trigger start` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|stop|Stop| - -#### Parameters +##### Example +``` +az datafactory trigger start --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger subscribe-to-event - -subscribe-to-event a datafactory trigger. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|subscribe-to-event|SubscribeToEvents| +#### Command `az datafactory trigger stop` -#### Parameters +##### Example +``` +az datafactory trigger stop --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --name \ +"exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger unsubscribe-from-event - -unsubscribe-from-event a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|unsubscribe-from-event|UnsubscribeFromEvents| +#### Command `az datafactory trigger subscribe-to-event` -#### Parameters +##### Example +``` +az datafactory trigger subscribe-to-event --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -### datafactory trigger update +#### Command `az datafactory trigger unsubscribe-from-event` -update a datafactory trigger. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger|Triggers| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|update|CreateOrUpdate#Update| - -#### Parameters +##### Example +``` +az datafactory trigger unsubscribe-from-event --factory-name "exampleFactoryName" --resource-group \ +"exampleResourceGroup" --name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| |**--factory-name**|string|The factory name.|factory_name|factoryName| |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| -|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match|If-Match| -|**--description**|string|Trigger description.|description|description| -|**--annotations**|array|List of tags that can be used for describing the trigger.|annotations|annotations| -### datafactory trigger-run cancel +### group `az datafactory trigger-run` +#### Command `az datafactory trigger-run cancel` -cancel a datafactory trigger-run. 
- -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger-run|TriggerRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|cancel|Cancel| - -#### Parameters +##### Example +``` +az datafactory trigger-run cancel --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ +"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1361,21 +1111,15 @@ cancel a datafactory trigger-run. |**--trigger-name**|string|The trigger name.|trigger_name|triggerName| |**--run-id**|string|The pipeline run identifier.|run_id|runId| -### datafactory trigger-run query-by-factory - -query-by-factory a datafactory trigger-run. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger-run|TriggerRuns| +#### Command `az datafactory trigger-run query-by-factory` -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|query-by-factory|QueryByFactory| - -#### Parameters +##### Example +``` +az datafactory trigger-run query-by-factory --factory-name "exampleFactoryName" --filters operand="TriggerName" \ +operator="Equals" values="exampleTrigger" --last-updated-after "2018-06-16T00:36:44.3345758Z" --last-updated-before \ +"2018-06-16T00:49:48.3686473Z" --resource-group "exampleResourceGroup" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| @@ -1386,21 +1130,14 @@ query-by-factory a datafactory trigger-run. |**--filters**|array|List of filters.|filters|filters| |**--order-by**|array|List of OrderBy option.|order_by|orderBy| -### datafactory trigger-run rerun - -rerun a datafactory trigger-run. - -#### Command group -|Name (az)|Swagger name| -|---------|------------| -|datafactory trigger-run|TriggerRuns| - -#### Methods -|Name (az)|Swagger name| -|---------|------------| -|rerun|Rerun| +#### Command `az datafactory trigger-run rerun` -#### Parameters +##### Example +``` +az datafactory trigger-run rerun --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" --run-id \ +"2f7fdb90-5df1-4b8e-ac2f-064cfa58202b" --trigger-name "exampleTrigger" +``` +##### Parameters |Option|Type|Description|Path (SDK)|Swagger name| |------|----|-----------|----------|------------| |**--resource-group-name**|string|The resource group name.|resource_group_name|resourceGroupName| diff --git a/src/datafactory/setup.py b/src/datafactory/setup.py index 116e5e2b8cc..a26a3db9b84 100644 --- a/src/datafactory/setup.py +++ b/src/datafactory/setup.py @@ -10,7 +10,7 @@ from setuptools import setup, find_packages # HISTORY.rst entry. -VERSION = '0.2.0' +VERSION = '0.1.0' try: from azext_datafactory.manual.version import VERSION except ImportError: @@ -33,7 +33,7 @@ DEPENDENCIES = [] try: - from .manual.dependency import DEPENDENCIES + from azext_datafactory.manual.dependency import DEPENDENCIES except ImportError: pass